2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3, or (at your option)
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "hash-table.h"
34 #include "tree-iterator.h"
35 #include "alloc-pool.h"
37 #include "tree-pass.h"
40 #include "langhooks.h"
42 #include "tree-ssa-sccvn.h"
43 #include "tree-scalar-evolution.h"
50 1. Avail sets can be shared by making an avail_find_leader that
51 walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
53 2. Strength reduction can be performed by anticipating expressions
54 we can repair later on.
55 3. We can do back-substitution or smarter value numbering to catch
56 commutative expressions split up over multiple statements.
59 /* For ease of terminology, "expression node" in the below refers to
60 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
61 represent the actual statement containing the expressions we care about,
62 and we cache the value number by putting it in the expression. */
66 First we walk the statements to generate the AVAIL sets, the
67 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
68 generation of values/expressions by a given block. We use them
69 when computing the ANTIC sets. The AVAIL sets consist of
70 SSA_NAME's that represent values, so we know what values are
71 available in what blocks. AVAIL is a forward dataflow problem. In
72 SSA, values are never killed, so we don't need a kill set, or a
73 fixpoint iteration, in order to calculate the AVAIL sets. In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.
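
   As a rough sketch of that computation (illustrative only, not the
   exact code), AVAIL is filled in by a single walk over the dominator
   tree:

     AVAIL_OUT[entry] = {}
     AVAIL_IN[block]  = AVAIL_OUT[idom (block)]
     AVAIL_OUT[block] = AVAIL_IN[block] U PHI_GEN[block] U TMP_GEN[block]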
77 Next, we generate the ANTIC sets. These sets represent the
78 anticipatable expressions. ANTIC is a backwards dataflow
79 problem. An expression is anticipatable in a given block if it could
80 be generated in that block. This means that if we had to perform
81 an insertion in that block, of the value of that expression, we
82 could. Calculating the ANTIC sets requires phi translation of
83 expressions, because the flow goes backwards through phis. We must
84 iterate to a fixpoint of the ANTIC sets, because we have a kill
85 set. Even in SSA form, values are not live over the entire
86 function, only from their definition point onwards. So we have to
87 remove values from the ANTIC set once we go past the definition
88 point of the leaders that make them up.
89 compute_antic/compute_antic_aux performs this computation.
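
   As an illustrative sketch (the precise form, including phi
   translation, is repeated in the comment before compute_antic_aux):

     ANTIC_OUT[block] = intersection of ANTIC_IN[succ] over succ(block)
                        (phi translated where the successor has phis)
     ANTIC_IN[block]  = clean (ANTIC_OUT[block] U EXP_GEN[block]
                               - TMP_GEN[block])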
91 Third, we perform insertions to make partially redundant
92 expressions fully redundant.
   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.
101 In order to make it fully redundant, we insert the expression into
102 the predecessors where it is not available, but is ANTIC.
   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.
108 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
109 performs these steps.
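
   For example (a hand-written sketch, not actual pass output), with
   "a + b" available on only one path into a join block but ANTIC at
   the join, insertion places the computation on the other path and
   merges the results with a PHI:

     if (...)                         if (...)
       x = a + b;                       x = a + b;
     else                       =>    else
       ;                                t = a + b;
     y = a + b;                       xp = PHI <x, t>;
                                      y = xp;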
111 Fourth, we eliminate fully redundant expressions.
112 This is a simple statement walk that replaces redundant
113 calculations with the now available values. */
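
/* As a small hand-written illustration of the elimination step (not
   actual pass output): once the value of a_2 + b_3 is known to be
   available in x_1, a later identical computation is replaced by that
   leader:

     x_1 = a_2 + b_3;            x_1 = a_2 + b_3;
     ...                   =>    ...
     y_4 = a_2 + b_3;            y_4 = x_1;  */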
115 /* Representations of value numbers:
117 Value numbers are represented by a representative SSA_NAME. We
118 will create fake SSA_NAME's in situations where we need a
119 representative but do not have one (because it is a complex
120 expression). In order to facilitate storing the value numbers in
121 bitmaps, and keep the number of wasted SSA_NAME's down, we also
122 associate a value_id with each value number, and create full blown
123 ssa_name's only where we actually need them (IE in operands of
124 existing expressions).
126 Theoretically you could replace all the value_id's with
127 SSA_NAME_VERSION, but this would allocate a large number of
128 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
132 /* Representation of expressions on value numbers:
134 Expressions consisting of value numbers are represented the same
135 way as our VN internally represents them, with an additional
136 "pre_expr" wrapping around them in order to facilitate storing all
137 of the expressions in the same sets. */
139 /* Representation of sets:
   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce one on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
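
/* For example (illustrative only): a set containing two expressions
   whose expression-ids are 4 and 15 and whose value-ids are 3 and 7
   is stored as the expression bitmap { 4, 15 } plus the value bitmap
   { 3, 7 }; membership tests on either values or expressions are then
   plain bitmap operations.  */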
/* Type of expression, used to know which member of the PRE_EXPR union
   is in use.  */
typedef union pre_expr_union_d
  vn_reference_t reference;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
  enum pre_expr_kind kind;

  /* hash_table support.  */
  typedef pre_expr_d T;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
181 #define PRE_EXPR_NAME(e) (e)->u.name
182 #define PRE_EXPR_NARY(e) (e)->u.nary
183 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
184 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */
pre_expr_d::equal (const struct pre_expr_d *e1, const struct pre_expr_d *e2)
  if (e1->kind != e2->kind)
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
pre_expr_d::hash (const struct pre_expr_d *e)
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
      return PRE_EXPR_NARY (e)->hashcode;
      return PRE_EXPR_REFERENCE (e)->hashcode;
231 /* Next global expression id number. */
static unsigned int next_expression_id;
234 /* Mapping from expression to id number we can use in bitmap sets. */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static hash_table <pre_expr_d> expression_to_id;
static VEC(unsigned, heap) *name_to_id;
241 /* Allocate an expression id for EXPR. */
static inline unsigned int
alloc_expression_id (pre_expr expr)
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      unsigned old_len = VEC_length (unsigned, name_to_id);
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
      slot = expression_to_id.find_slot (expr, INSERT);
  return next_expression_id - 1;
272 /* Return the expression id for tree EXPR. */
static inline unsigned int
get_expression_id (const pre_expr expr)
static inline unsigned int
lookup_expression_id (const pre_expr expr)
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
      return VEC_index (unsigned, name_to_id, version);
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      return ((pre_expr)*slot)->id;
301 /* Return the existing expression id for EXPR, or create one if one
302 does not exist yet. */
static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
  unsigned int id = lookup_expression_id (expr);
    return alloc_expression_id (expr);
  return expr->id = id;
313 /* Return the expression that has expression id ID */
static inline pre_expr
expression_for_id (unsigned int id)
  return VEC_index (pre_expr, expressions, id);
321 /* Free the expression id field in all of our expressions,
322 and then destroy the expressions array. */
clear_expression_ids (void)
  VEC_free (pre_expr, heap, expressions);
static alloc_pool pre_expr_pool;
332 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
get_or_alloc_expr_for_name (tree name)
  struct pre_expr_d expr;
  unsigned int result_id;

  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
    return expression_for_id (result_id);
  result = (pre_expr) pool_alloc (pre_expr_pool);
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
  bitmap_head expressions;
363 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
364 EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))
366 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
367 EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
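
/* A typical use of the iteration macros above (an illustrative sketch;
   the same pattern appears later in this file):

     unsigned int i;
     bitmap_iterator bi;

     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
	 pre_expr expr = expression_for_id (i);
	 ...
       }  */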
369 /* Mapping from value id to expressions with that value_id. */
static VEC(bitmap, heap) *value_expressions;
372 /* Sets that we need to keep track of. */
typedef struct bb_bitmap_sets
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
418 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
419 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
420 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
421 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
422 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
423 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
424 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
425 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
426 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
427 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
428 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
431 /* Basic block list in postorder. */
static int *postorder;
434 /* This structure is used to keep track of statistics on what
435 optimization PRE was able to perform. */
438 /* The number of RHS computations eliminated by PRE. */
441 /* The number of new expressions/temporaries generated by PRE. */
444 /* The number of inserts found due to partial anticipation */
447 /* The number of new PHI nodes added by PRE. */
450 /* The number of values found constant. */
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
470 /* We can add and remove elements and entries to and from sets
471 and hash tables, so we use alloc pools for them. */
static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;
476 /* Set of blocks with statements that have had their EH properties changed. */
static bitmap need_eh_cleanup;
479 /* Set of blocks with statements that have had their AB properties changed. */
static bitmap need_ab_cleanup;
482 /* A three tuple {e, pred, v} used to cache phi translations in the
483 phi_translate_table. */
typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
  /* The expression.  */

  /* The predecessor block along which we translated the expression.  */

  /* The value that resulted from the translation.  */

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */

  /* hash_table support.  */
  typedef expr_pred_trans_d T;
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *,
			   const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
expr_pred_trans_d::hash (const expr_pred_trans_d *e)

expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  return pre_expr_d::equal (ve1->e, ve2->e);
527 /* The phi_translate_table caches phi translations for a given
528 expression and predecessor. */
static hash_table <expr_pred_trans_d> phi_translate_table;
531 /* Search in the phi translation table for the translation of
532 expression E in basic block PRED.
533 Return the translated value, if found, NULL otherwise. */
static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
  expr_pred_trans_t *slot;
  struct expr_pred_trans_d ept;

  ept.hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e), pred->index);
  slot = phi_translate_table.find_slot_with_hash (&ept, ept.hashcode,
553 /* Add the tuple mapping from {expression E, basic block PRED} to
554 value V, to the phi translation table. */
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
  expr_pred_trans_t *slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->pred = pred;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e),
  slot = phi_translate_table.find_slot_with_hash (new_pair,
						  new_pair->hashcode, INSERT);
574 /* Add expression E to the expression set of value id V. */
add_to_value (unsigned int v, pre_expr e)
  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap, value_expressions))
      VEC_safe_grow_cleared (bitmap, heap, value_expressions, v + 1);

  set = VEC_index (bitmap, value_expressions, v);
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      VEC_replace (bitmap, value_expressions, v, set);

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
598 /* Create a new bitmap set and return it. */
bitmap_set_new (void)
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
609 /* Return the value id for a PRE expression EXPR. */
get_expr_value_id (pre_expr expr)
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	  id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	  add_to_value (id, expr);
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      return PRE_EXPR_NARY (expr)->value_id;
      return PRE_EXPR_REFERENCE (expr)->value_id;
638 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
641 sccvn_valnum_from_value_id (unsigned int val
)
645 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
646 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
648 pre_expr vexpr
= expression_for_id (i
);
649 if (vexpr
->kind
== NAME
)
650 return VN_INFO (PRE_EXPR_NAME (vexpr
))->valnum
;
651 else if (vexpr
->kind
== CONSTANT
)
652 return PRE_EXPR_CONSTANT (vexpr
);
657 /* Remove an expression EXPR from a bitmapped set. */
660 bitmap_remove_from_set (bitmap_set_t set
, pre_expr expr
)
662 unsigned int val
= get_expr_value_id (expr
);
663 if (!value_id_constant_p (val
))
665 bitmap_clear_bit (&set
->values
, val
);
666 bitmap_clear_bit (&set
->expressions
, get_expression_id (expr
));
671 bitmap_insert_into_set_1 (bitmap_set_t set
, pre_expr expr
,
672 unsigned int val
, bool allow_constants
)
674 if (allow_constants
|| !value_id_constant_p (val
))
676 /* We specifically expect this and only this function to be able to
677 insert constants into a set. */
678 bitmap_set_bit (&set
->values
, val
);
679 bitmap_set_bit (&set
->expressions
, get_or_alloc_expression_id (expr
));
683 /* Insert an expression EXPR into a bitmapped set. */
686 bitmap_insert_into_set (bitmap_set_t set
, pre_expr expr
)
688 bitmap_insert_into_set_1 (set
, expr
, get_expr_value_id (expr
), false);
691 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
694 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
696 bitmap_copy (&dest
->expressions
, &orig
->expressions
);
697 bitmap_copy (&dest
->values
, &orig
->values
);
701 /* Free memory used up by SET. */
703 bitmap_set_free (bitmap_set_t set
)
705 bitmap_clear (&set
->expressions
);
706 bitmap_clear (&set
->values
);
/* Generate a topologically-ordered array of bitmap set SET.  */
712 static VEC(pre_expr
, heap
) *
713 sorted_array_from_bitmap_set (bitmap_set_t set
)
716 bitmap_iterator bi
, bj
;
717 VEC(pre_expr
, heap
) *result
;
719 /* Pre-allocate roughly enough space for the array. */
720 result
= VEC_alloc (pre_expr
, heap
, bitmap_count_bits (&set
->values
));
722 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
724 /* The number of expressions having a given value is usually
725 relatively small. Thus, rather than making a vector of all
726 the expressions and sorting it by value-id, we walk the values
727 and check in the reverse mapping that tells us what expressions
728 have a given value, to filter those in our set. As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
734 bitmap exprset
= VEC_index (bitmap
, value_expressions
, i
);
735 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
737 if (bitmap_bit_p (&set
->expressions
, j
))
738 VEC_safe_push (pre_expr
, heap
, result
, expression_for_id (j
));
745 /* Perform bitmapped set operation DEST &= ORIG. */
748 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
756 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
758 bitmap_and_into (&dest
->values
, &orig
->values
);
759 bitmap_copy (&temp
, &dest
->expressions
);
760 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
762 pre_expr expr
= expression_for_id (i
);
763 unsigned int value_id
= get_expr_value_id (expr
);
764 if (!bitmap_bit_p (&dest
->values
, value_id
))
765 bitmap_clear_bit (&dest
->expressions
, i
);
767 bitmap_clear (&temp
);
771 /* Subtract all values and expressions contained in ORIG from DEST. */
774 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
776 bitmap_set_t result
= bitmap_set_new ();
780 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
783 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
785 pre_expr expr
= expression_for_id (i
);
786 unsigned int value_id
= get_expr_value_id (expr
);
787 bitmap_set_bit (&result
->values
, value_id
);
793 /* Subtract all the values in bitmap set B from bitmap set A. */
796 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
802 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
804 bitmap_copy (&temp
, &a
->expressions
);
805 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
807 pre_expr expr
= expression_for_id (i
);
808 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
809 bitmap_remove_from_set (a
, expr
);
811 bitmap_clear (&temp
);
815 /* Return true if bitmapped set SET contains the value VALUE_ID. */
818 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
820 if (value_id_constant_p (value_id
))
823 if (!set
|| bitmap_empty_p (&set
->expressions
))
826 return bitmap_bit_p (&set
->values
, value_id
);
830 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
832 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
835 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
838 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
845 if (value_id_constant_p (lookfor
))
848 if (!bitmap_set_contains_value (set
, lookfor
))
851 /* The number of expressions having a given value is usually
852 significantly less than the total number of expressions in SET.
853 Thus, rather than check, for each expression in SET, whether it
854 has the value LOOKFOR, we walk the reverse mapping that tells us
855 what expressions have a given value, and see if any of those
856 expressions are in our set. For large testcases, this is about
857 5-10x faster than walking the bitmap. If this is somehow a
     significant loss for some cases, we can choose which set to walk
859 based on the set size. */
860 exprset
= VEC_index (bitmap
, value_expressions
, lookfor
);
861 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
863 if (bitmap_clear_bit (&set
->expressions
, i
))
865 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
871 /* Return true if two bitmap sets are equal. */
874 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
876 return bitmap_equal_p (&a
->values
, &b
->values
);
879 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
880 and add it otherwise. */
883 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
885 unsigned int val
= get_expr_value_id (expr
);
887 if (bitmap_set_contains_value (set
, val
))
888 bitmap_set_replace_value (set
, val
, expr
);
890 bitmap_insert_into_set (set
, expr
);
/* Insert EXPR into SET if EXPR's value is not already present in
   the set.  */
897 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
899 unsigned int val
= get_expr_value_id (expr
);
901 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
903 /* Constant values are always considered to be part of the set. */
904 if (value_id_constant_p (val
))
907 /* If the value membership changed, add the expression. */
908 if (bitmap_set_bit (&set
->values
, val
))
909 bitmap_set_bit (&set
->expressions
, expr
->id
);
912 /* Print out EXPR to outfile. */
915 print_pre_expr (FILE *outfile
, const pre_expr expr
)
920 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
923 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
928 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
929 fprintf (outfile
, "{%s,", tree_code_name
[nary
->opcode
]);
930 for (i
= 0; i
< nary
->length
; i
++)
932 print_generic_expr (outfile
, nary
->op
[i
], 0);
933 if (i
!= (unsigned) nary
->length
- 1)
934 fprintf (outfile
, ",");
936 fprintf (outfile
, "}");
942 vn_reference_op_t vro
;
944 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
945 fprintf (outfile
, "{");
947 VEC_iterate (vn_reference_op_s
, ref
->operands
, i
, vro
);
950 bool closebrace
= false;
951 if (vro
->opcode
!= SSA_NAME
952 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
954 fprintf (outfile
, "%s", tree_code_name
[vro
->opcode
]);
957 fprintf (outfile
, "<");
963 print_generic_expr (outfile
, vro
->op0
, 0);
966 fprintf (outfile
, ",");
967 print_generic_expr (outfile
, vro
->op1
, 0);
971 fprintf (outfile
, ",");
972 print_generic_expr (outfile
, vro
->op2
, 0);
976 fprintf (outfile
, ">");
977 if (i
!= VEC_length (vn_reference_op_s
, ref
->operands
) - 1)
978 fprintf (outfile
, ",");
980 fprintf (outfile
, "}");
983 fprintf (outfile
, "@");
984 print_generic_expr (outfile
, ref
->vuse
, 0);
990 void debug_pre_expr (pre_expr
);
992 /* Like print_pre_expr but always prints to stderr. */
994 debug_pre_expr (pre_expr e
)
996 print_pre_expr (stderr
, e
);
997 fprintf (stderr
, "\n");
1000 /* Print out SET to OUTFILE. */
1003 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1004 const char *setname
, int blockindex
)
1006 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1013 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1015 const pre_expr expr
= expression_for_id (i
);
1018 fprintf (outfile
, ", ");
1020 print_pre_expr (outfile
, expr
);
1022 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1025 fprintf (outfile
, " }\n");
1028 void debug_bitmap_set (bitmap_set_t
);
1031 debug_bitmap_set (bitmap_set_t set
)
1033 print_bitmap_set (stderr
, set
, "debug", 0);
1036 void debug_bitmap_sets_for (basic_block
);
1039 debug_bitmap_sets_for (basic_block bb
)
1041 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1042 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1043 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1044 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1045 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1046 if (do_partial_partial
)
1047 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1048 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1051 /* Print out the expressions that have VAL to OUTFILE. */
1054 print_value_expressions (FILE *outfile
, unsigned int val
)
1056 bitmap set
= VEC_index (bitmap
, value_expressions
, val
);
1061 sprintf (s
, "%04d", val
);
1062 x
.expressions
= *set
;
1063 print_bitmap_set (outfile
, &x
, s
, 0);
1069 debug_value_expressions (unsigned int val
)
1071 print_value_expressions (stderr
, val
);
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */
1078 get_or_alloc_expr_for_constant (tree constant
)
1080 unsigned int result_id
;
1081 unsigned int value_id
;
1082 struct pre_expr_d expr
;
1085 expr
.kind
= CONSTANT
;
1086 PRE_EXPR_CONSTANT (&expr
) = constant
;
1087 result_id
= lookup_expression_id (&expr
);
1089 return expression_for_id (result_id
);
1091 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1092 newexpr
->kind
= CONSTANT
;
1093 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1094 alloc_expression_id (newexpr
);
1095 value_id
= get_or_alloc_constant_value_id (constant
);
1096 add_to_value (value_id
, newexpr
);
1100 /* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */
1105 get_constant_for_value_id (unsigned int v
)
1107 if (value_id_constant_p (v
))
1111 bitmap exprset
= VEC_index (bitmap
, value_expressions
, v
);
1113 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1115 pre_expr expr
= expression_for_id (i
);
1116 if (expr
->kind
== CONSTANT
)
1117 return PRE_EXPR_CONSTANT (expr
);
1123 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1124 Currently only supports constants and SSA_NAMES. */
1126 get_or_alloc_expr_for (tree t
)
1128 if (TREE_CODE (t
) == SSA_NAME
)
1129 return get_or_alloc_expr_for_name (t
);
1130 else if (is_gimple_min_invariant (t
))
1131 return get_or_alloc_expr_for_constant (t
);
1134 /* More complex expressions can result from SCCVN expression
1135 simplification that inserts values for them. As they all
     do not have VOPs, they get handled by the nary ops struct.  */
1137 vn_nary_op_t result
;
1138 unsigned int result_id
;
1139 vn_nary_op_lookup (t
, &result
);
1142 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1144 PRE_EXPR_NARY (e
) = result
;
1145 result_id
= lookup_expression_id (e
);
1148 pool_free (pre_expr_pool
, e
);
1149 e
= expression_for_id (result_id
);
1152 alloc_expression_id (e
);
1159 /* Return the folded version of T if T, when folded, is a gimple
1160 min_invariant. Otherwise, return T. */
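
/* For instance (illustrative only): a NARY expression whose operands
   all have constant leaders, such as {plus_expr, 3, 4}, folds to the
   constant 7 and comes back as a CONSTANT pre_expr, while something
   like x_1 * 1 may simplify to x_1, which is not a gimple
   min_invariant, so the original expression is returned unchanged.  */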
1163 fully_constant_expression (pre_expr e
)
1171 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1172 switch (TREE_CODE_CLASS (nary
->opcode
))
1175 case tcc_comparison
:
	/* We have to go from trees to pre exprs to value ids to
	   constants.  */
1179 tree naryop0
= nary
->op
[0];
1180 tree naryop1
= nary
->op
[1];
1182 if (!is_gimple_min_invariant (naryop0
))
1184 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1185 unsigned int vrep0
= get_expr_value_id (rep0
);
1186 tree const0
= get_constant_for_value_id (vrep0
);
1188 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1190 if (!is_gimple_min_invariant (naryop1
))
1192 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1193 unsigned int vrep1
= get_expr_value_id (rep1
);
1194 tree const1
= get_constant_for_value_id (vrep1
);
1196 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1198 result
= fold_binary (nary
->opcode
, nary
->type
,
1200 if (result
&& is_gimple_min_invariant (result
))
1201 return get_or_alloc_expr_for_constant (result
);
1202 /* We might have simplified the expression to a
1203 SSA_NAME for example from x_1 * 1. But we cannot
1204 insert a PHI for x_1 unconditionally as x_1 might
1205 not be available readily. */
1209 if (nary
->opcode
!= REALPART_EXPR
1210 && nary
->opcode
!= IMAGPART_EXPR
1211 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
	/* We have to go from trees to pre exprs to value ids to
	   constants.  */
1218 tree naryop0
= nary
->op
[0];
1219 tree const0
, result
;
1220 if (is_gimple_min_invariant (naryop0
))
1224 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1225 unsigned int vrep0
= get_expr_value_id (rep0
);
1226 const0
= get_constant_for_value_id (vrep0
);
1231 tree type1
= TREE_TYPE (nary
->op
[0]);
1232 const0
= fold_convert (type1
, const0
);
1233 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1235 if (result
&& is_gimple_min_invariant (result
))
1236 return get_or_alloc_expr_for_constant (result
);
1245 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1247 if ((folded
= fully_constant_vn_reference_p (ref
)))
1248 return get_or_alloc_expr_for_constant (folded
);
1257 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1258 it has the value it would have in BLOCK. Set *SAME_VALID to true
1259 in case the new vuse doesn't change the value id of the OPERANDS. */
1262 translate_vuse_through_block (VEC (vn_reference_op_s
, heap
) *operands
,
1263 alias_set_type set
, tree type
, tree vuse
,
1264 basic_block phiblock
,
1265 basic_block block
, bool *same_valid
)
1267 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1274 if (gimple_bb (phi
) != phiblock
)
1277 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1279 /* Use the alias-oracle to find either the PHI node in this block,
1280 the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
1282 if (gimple_code (phi
) == GIMPLE_PHI
)
1283 e
= find_edge (block
, phiblock
);
1284 else if (use_oracle
)
1285 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1287 vuse
= gimple_vuse (phi
);
1288 phi
= SSA_NAME_DEF_STMT (vuse
);
1289 if (gimple_bb (phi
) != phiblock
)
1291 if (gimple_code (phi
) == GIMPLE_PHI
)
1293 e
= find_edge (block
, phiblock
);
1304 bitmap visited
= NULL
;
1306 /* Try to find a vuse that dominates this phi node by skipping
1307 non-clobbering statements. */
1308 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false);
1310 BITMAP_FREE (visited
);
1316 /* If we didn't find any, the value ID can't stay the same,
1317 but return the translated vuse. */
1318 *same_valid
= false;
1319 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1321 /* ??? We would like to return vuse here as this is the canonical
1322 upmost vdef that this reference is associated with. But during
1323 insertion of the references into the hash tables we only ever
1324 directly insert with their direct gimple_vuse, hence returning
1325 something else would make us not find the other expression. */
1326 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1332 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1333 SET2. This is used to avoid making a set consisting of the union
1334 of PA_IN and ANTIC_IN during insert. */
1336 static inline pre_expr
1337 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1341 result
= bitmap_find_leader (set1
, val
);
1342 if (!result
&& set2
)
1343 result
= bitmap_find_leader (set2
, val
);
1347 /* Get the tree type for our PRE expression e. */
1350 get_expr_type (const pre_expr e
)
1355 return TREE_TYPE (PRE_EXPR_NAME (e
));
1357 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1359 return PRE_EXPR_REFERENCE (e
)->type
;
1361 return PRE_EXPR_NARY (e
)->type
;
1366 /* Get a representative SSA_NAME for a given expression.
1367 Since all of our sub-expressions are treated as values, we require
1368 them to be SSA_NAME's for simplicity.
1369 Prior versions of GVNPRE used to use "value handles" here, so that
1370 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1371 either case, the operands are really values (IE we do not expect
1372 them to be usable without finding leaders). */
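
/* For example (illustrative only): if a value is represented by the
   expression set { t_5, a_2 + 1 }, the SSA_NAME t_5 is the natural
   representative; if the value only ever appeared as a translated
   NARY or REFERENCE, a fresh "pretmp" SSA_NAME is created for it
   below and given the same value-id.  */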
1375 get_representative_for (const pre_expr e
)
1378 unsigned int value_id
= get_expr_value_id (e
);
1383 return PRE_EXPR_NAME (e
);
1385 return PRE_EXPR_CONSTANT (e
);
1389 /* Go through all of the expressions representing this value
1390 and pick out an SSA_NAME. */
1393 bitmap exprs
= VEC_index (bitmap
, value_expressions
, value_id
);
1394 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1396 pre_expr rep
= expression_for_id (i
);
1397 if (rep
->kind
== NAME
)
1398 return PRE_EXPR_NAME (rep
);
1403 /* If we reached here we couldn't find an SSA_NAME. This can
1404 happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
1410 "Could not find SSA_NAME representative for expression:");
1411 print_pre_expr (dump_file
, e
);
1412 fprintf (dump_file
, "\n");
1415 /* Build and insert the assignment of the end result to the temporary
1416 that we will return. */
1417 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1418 VN_INFO_GET (name
)->value_id
= value_id
;
1419 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
1420 if (VN_INFO (name
)->valnum
== NULL_TREE
)
1421 VN_INFO (name
)->valnum
= name
;
1422 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1425 fprintf (dump_file
, "Created SSA_NAME representative ");
1426 print_generic_expr (dump_file
, name
, 0);
1427 fprintf (dump_file
, " for expression:");
1428 print_pre_expr (dump_file
, e
);
1429 fprintf (dump_file
, "\n");
1438 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1439 basic_block pred
, basic_block phiblock
);
1441 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1442 the phis in PRED. Return NULL if we can't find a leader for each part
1443 of the translated expression. */
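
/* A small hand-written example of what phi translation does (not
   actual dumps): given in PHIBLOCK

     x_1 = PHI <x_2 (PRED), x_3 (OTHER)>

   the expression x_1 + 1, anticipated in PHIBLOCK, is translated for
   the edge from PRED into x_2 + 1, so that its availability can then
   be checked against PRED's own sets.  */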
1446 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1447 basic_block pred
, basic_block phiblock
)
1454 bool changed
= false;
1455 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1456 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1457 sizeof_vn_nary_op (nary
->length
));
1458 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1460 for (i
= 0; i
< newnary
->length
; i
++)
1462 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1466 pre_expr leader
, result
;
1467 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1468 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1469 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1470 if (result
&& result
!= leader
)
1472 tree name
= get_representative_for (result
);
1475 newnary
->op
[i
] = name
;
1480 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1486 unsigned int new_val_id
;
1488 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1493 if (result
&& is_gimple_min_invariant (result
))
1494 return get_or_alloc_expr_for_constant (result
);
1496 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1501 PRE_EXPR_NARY (expr
) = nary
;
1502 constant
= fully_constant_expression (expr
);
1503 if (constant
!= expr
)
1506 new_val_id
= nary
->value_id
;
1507 get_or_alloc_expression_id (expr
);
1511 new_val_id
= get_next_value_id ();
1512 VEC_safe_grow_cleared (bitmap
, heap
,
1514 get_max_value_id() + 1);
1515 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1519 result
, new_val_id
);
1520 PRE_EXPR_NARY (expr
) = nary
;
1521 constant
= fully_constant_expression (expr
);
1522 if (constant
!= expr
)
1524 get_or_alloc_expression_id (expr
);
1526 add_to_value (new_val_id
, expr
);
1534 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1535 VEC (vn_reference_op_s
, heap
) *operands
= ref
->operands
;
1536 tree vuse
= ref
->vuse
;
1537 tree newvuse
= vuse
;
1538 VEC (vn_reference_op_s
, heap
) *newoperands
= NULL
;
1539 bool changed
= false, same_valid
= true;
1540 unsigned int i
, j
, n
;
1541 vn_reference_op_t operand
;
1542 vn_reference_t newref
;
1545 VEC_iterate (vn_reference_op_s
, operands
, i
, operand
); i
++, j
++)
1550 tree type
= operand
->type
;
1551 vn_reference_op_s newop
= *operand
;
1552 op
[0] = operand
->op0
;
1553 op
[1] = operand
->op1
;
1554 op
[2] = operand
->op2
;
1555 for (n
= 0; n
< 3; ++n
)
1557 unsigned int op_val_id
;
1560 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1562 /* We can't possibly insert these. */
1564 && !is_gimple_min_invariant (op
[n
]))
1568 op_val_id
= VN_INFO (op
[n
])->value_id
;
1569 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1572 /* Make sure we do not recursively translate ourselves
1573 like for translating a[n_1] with the leader for
1574 n_1 being a[n_1]. */
1575 if (get_expression_id (leader
) != get_expression_id (expr
))
1577 opresult
= phi_translate (leader
, set1
, set2
,
1581 if (opresult
!= leader
)
1583 tree name
= get_representative_for (opresult
);
1586 changed
|= name
!= op
[n
];
1594 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1598 newoperands
= VEC_copy (vn_reference_op_s
, heap
, operands
);
1599 /* We may have changed from an SSA_NAME to a constant */
1600 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1601 newop
.opcode
= TREE_CODE (op
[0]);
1606 /* If it transforms a non-constant ARRAY_REF into a constant
1607 one, adjust the constant offset. */
1608 if (newop
.opcode
== ARRAY_REF
1610 && TREE_CODE (op
[0]) == INTEGER_CST
1611 && TREE_CODE (op
[1]) == INTEGER_CST
1612 && TREE_CODE (op
[2]) == INTEGER_CST
)
1614 double_int off
= tree_to_double_int (op
[0]);
1615 off
+= -tree_to_double_int (op
[1]);
1616 off
*= tree_to_double_int (op
[2]);
1617 if (off
.fits_shwi ())
1618 newop
.off
= off
.low
;
1620 VEC_replace (vn_reference_op_s
, newoperands
, j
, newop
);
1621 /* If it transforms from an SSA_NAME to an address, fold with
1622 a preceding indirect reference. */
1623 if (j
> 0 && op
[0] && TREE_CODE (op
[0]) == ADDR_EXPR
1624 && VEC_index (vn_reference_op_s
,
1625 newoperands
, j
- 1).opcode
== MEM_REF
)
1626 vn_reference_fold_indirect (&newoperands
, &j
);
1628 if (i
!= VEC_length (vn_reference_op_s
, operands
))
1631 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1637 newvuse
= translate_vuse_through_block (newoperands
,
1638 ref
->set
, ref
->type
,
1639 vuse
, phiblock
, pred
,
1641 if (newvuse
== NULL_TREE
)
1643 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1648 if (changed
|| newvuse
!= vuse
)
1650 unsigned int new_val_id
;
1653 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1658 VEC_free (vn_reference_op_s
, heap
, newoperands
);
	  /* We can always insert constants, so if we have a partially
1661 redundant constant load of another type try to translate it
1662 to a constant of appropriate type. */
1663 if (result
&& is_gimple_min_invariant (result
))
1666 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1668 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1669 if (tem
&& !is_gimple_min_invariant (tem
))
1673 return get_or_alloc_expr_for_constant (tem
);
1676 /* If we'd have to convert things we would need to validate
1677 if we can insert the translated expression. So fail
1678 here for now - we cannot insert an alias with a different
1679 type in the VN tables either, as that would assert. */
1681 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1683 else if (!result
&& newref
1684 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1686 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1690 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1691 expr
->kind
= REFERENCE
;
1696 PRE_EXPR_REFERENCE (expr
) = newref
;
1697 constant
= fully_constant_expression (expr
);
1698 if (constant
!= expr
)
1701 new_val_id
= newref
->value_id
;
1702 get_or_alloc_expression_id (expr
);
1706 if (changed
|| !same_valid
)
1708 new_val_id
= get_next_value_id ();
1709 VEC_safe_grow_cleared (bitmap
, heap
,
1711 get_max_value_id() + 1);
1714 new_val_id
= ref
->value_id
;
1715 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1718 result
, new_val_id
);
1720 PRE_EXPR_REFERENCE (expr
) = newref
;
1721 constant
= fully_constant_expression (expr
);
1722 if (constant
!= expr
)
1724 get_or_alloc_expression_id (expr
);
1726 add_to_value (new_val_id
, expr
);
1728 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1735 tree name
= PRE_EXPR_NAME (expr
);
1736 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
      /* If the SSA name is defined by a PHI node in this block,
	 translate it.  */
1739 if (gimple_code (def_stmt
) == GIMPLE_PHI
1740 && gimple_bb (def_stmt
) == phiblock
)
1742 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1743 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1745 /* Handle constant. */
1746 if (is_gimple_min_invariant (def
))
1747 return get_or_alloc_expr_for_constant (def
);
1749 return get_or_alloc_expr_for_name (def
);
1751 /* Otherwise return it unchanged - it will get cleaned if its
	 value is not available in PRED's AVAIL_OUT set of expressions.  */
1761 /* Wrapper around phi_translate_1 providing caching functionality. */
1764 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1765 basic_block pred
, basic_block phiblock
)
1772 /* Constants contain no values that need translation. */
1773 if (expr
->kind
== CONSTANT
)
1776 if (value_id_constant_p (get_expr_value_id (expr
)))
1779 if (expr
->kind
!= NAME
)
1781 phitrans
= phi_trans_lookup (expr
, pred
);
1787 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1789 /* Don't add empty translations to the cache. Neither add
1790 translations of NAMEs as those are cheap to translate. */
1792 && expr
->kind
!= NAME
)
1793 phi_trans_add (expr
, phitrans
, pred
);
1799 /* For each expression in SET, translate the values through phi nodes
1800 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1801 expressions in DEST. */
1804 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1805 basic_block phiblock
)
1807 VEC (pre_expr
, heap
) *exprs
;
1811 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1813 bitmap_set_copy (dest
, set
);
1817 exprs
= sorted_array_from_bitmap_set (set
);
1818 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
1820 pre_expr translated
;
1821 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1825 /* We might end up with multiple expressions from SET being
1826 translated to the same value. In this case we do not want
1827 to retain the NARY or REFERENCE expression but prefer a NAME
1828 which would be the leader. */
1829 if (translated
->kind
== NAME
)
1830 bitmap_value_replace_in_set (dest
, translated
);
1832 bitmap_value_insert_into_set (dest
, translated
);
1834 VEC_free (pre_expr
, heap
, exprs
);
1837 /* Find the leader for a value (i.e., the name representing that
1838 value) in a given set, and return it. If STMT is non-NULL it
1839 makes sure the defining statement for the leader dominates it.
1840 Return NULL if no leader is found. */
1843 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1845 if (value_id_constant_p (val
))
1849 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
1851 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1853 pre_expr expr
= expression_for_id (i
);
1854 if (expr
->kind
== CONSTANT
)
1858 if (bitmap_set_contains_value (set
, val
))
1860 /* Rather than walk the entire bitmap of expressions, and see
1861 whether any of them has the value we are looking for, we look
1862 at the reverse mapping, which tells us the set of expressions
1863 that have a given value (IE value->expressions with that
1864 value) and see if any of those expressions are in our set.
1865 The number of expressions per value is usually significantly
1866 less than the number of expressions in the set. In fact, for
1867 large testcases, doing it this way is roughly 5-10x faster
1868 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
1870 choose which set to walk based on which set is smaller. */
1873 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
1875 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1876 return expression_for_id (i
);
1881 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1882 BLOCK by seeing if it is not killed in the block. Note that we are
1883 only determining whether there is a store that kills it. Because
1884 of the order in which clean iterates over values, we are guaranteed
1885 that altered operands will have caused us to be eliminated from the
1886 ANTIC_IN set already. */
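
/* For instance (illustrative only): if BLOCK contains

     *p_1 = ...;

   and that store may clobber the memory referenced by a REFERENCE
   expression *q_2, then {*q_2, VUSE} dies in BLOCK and must not be
   considered anticipatable at BLOCK's top.  */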
1889 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1891 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1892 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1894 gimple_stmt_iterator gsi
;
1895 unsigned id
= get_expression_id (expr
);
1902 /* Lookup a previously calculated result. */
1903 if (EXPR_DIES (block
)
1904 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1905 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
1907 /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting the statement walk from the
     top of the basic block, a statement uses VUSE, there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
1911 so we can stop walking. */
1912 ref
.base
= NULL_TREE
;
1913 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1915 tree def_vuse
, def_vdef
;
1916 def
= gsi_stmt (gsi
);
1917 def_vuse
= gimple_vuse (def
);
1918 def_vdef
= gimple_vdef (def
);
1920 /* Not a memory statement. */
1924 /* Not a may-def. */
1927 /* A load with the same VUSE, we're done. */
1928 if (def_vuse
== vuse
)
1934 /* Init ref only if we really need it. */
1935 if (ref
.base
== NULL_TREE
1936 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1942 /* If the statement may clobber expr, it dies. */
1943 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1950 /* Remember the result. */
1951 if (!EXPR_DIES (block
))
1952 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1953 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1955 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1961 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1962 contains its value-id. */
1965 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1967 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1969 unsigned int value_id
= VN_INFO (op
)->value_id
;
1970 if (!(bitmap_set_contains_value (set1
, value_id
)
1971 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1977 /* Determine if the expression EXPR is valid in SET1 U SET2.
1978 ONLY SET2 CAN BE NULL.
1979 This means that we have a leader for each part of the expression
1980 (if it consists of values), or the expression is an SSA_NAME.
1981 For loads/calls, we also see if the vuse is killed in this block. */
1984 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
,
1990 return bitmap_set_contains_expr (AVAIL_OUT (block
), expr
);
1994 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1995 for (i
= 0; i
< nary
->length
; i
++)
1996 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
2003 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2004 vn_reference_op_t vro
;
2007 FOR_EACH_VEC_ELT (vn_reference_op_s
, ref
->operands
, i
, vro
)
2009 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
2010 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
2011 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
2021 /* Clean the set of expressions that are no longer valid in SET1 or
2022 SET2. This means expressions that are made up of values we have no
2023 leaders for in SET1 or SET2. This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */
2028 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
2030 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set1
);
2034 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
2036 if (!valid_in_sets (set1
, set2
, expr
, block
))
2037 bitmap_remove_from_set (set1
, expr
);
2039 VEC_free (pre_expr
, heap
, exprs
);
2042 /* Clean the set of expressions that are no longer valid in SET. This
   means expressions that are made up of values we have no leaders for
   in SET.  */
2047 clean (bitmap_set_t set
, basic_block block
)
2049 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set
);
2053 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
2055 if (!valid_in_sets (set
, NULL
, expr
, block
))
2056 bitmap_remove_from_set (set
, expr
);
2058 VEC_free (pre_expr
, heap
, exprs
);
2061 /* Clean the set of expressions that are no longer valid in SET because
2062 they are clobbered in BLOCK or because they trap and may not be executed. */
2065 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2070 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2072 pre_expr expr
= expression_for_id (i
);
2073 if (expr
->kind
== REFERENCE
)
2075 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2078 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2079 if (!gimple_nop_p (def_stmt
)
2080 && ((gimple_bb (def_stmt
) != block
2081 && !dominated_by_p (CDI_DOMINATORS
,
2082 block
, gimple_bb (def_stmt
)))
2083 || (gimple_bb (def_stmt
) == block
2084 && value_dies_in_block_x (expr
, block
))))
2085 bitmap_remove_from_set (set
, expr
);
2088 else if (expr
->kind
== NARY
)
2090 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2091 /* If the NARY may trap make sure the block does not contain
2092 a possible exit point.
2093 ??? This is overly conservative if we translate AVAIL_OUT
2094 as the available expression might be after the exit point. */
2095 if (BB_MAY_NOTRETURN (block
)
2096 && vn_nary_may_trap (nary
))
2097 bitmap_remove_from_set (set
, expr
);
2102 static sbitmap has_abnormal_preds
;
2104 /* List of blocks that may have changed during ANTIC computation and
2105 thus need to be iterated over. */
2107 static sbitmap changed_blocks
;
2109 /* Decide whether to defer a block for a later iteration, or PHI
2110 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2111 should defer the block, and true if we processed it. */
2114 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
2115 basic_block block
, basic_block phiblock
)
2117 if (!BB_VISITED (phiblock
))
2119 SET_BIT (changed_blocks
, block
->index
);
2120 BB_VISITED (block
) = 0;
2121 BB_DEFERRED (block
) = 1;
2125 phi_translate_set (dest
, source
, block
, phiblock
);
2129 /* Compute the ANTIC set for BLOCK.
2131 If succs(BLOCK) > 1 then
2132 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2133 else if succs(BLOCK) == 1 then
2134 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2136 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2140 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2142 bool changed
= false;
2143 bitmap_set_t S
, old
, ANTIC_OUT
;
2149 old
= ANTIC_OUT
= S
= NULL
;
2150 BB_VISITED (block
) = 1;
  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
2154 if (block_has_abnormal_pred_edge
)
2155 goto maybe_dump_sets
;
2157 old
= ANTIC_IN (block
);
2158 ANTIC_OUT
= bitmap_set_new ();
2160 /* If the block has no successors, ANTIC_OUT is empty. */
2161 if (EDGE_COUNT (block
->succs
) == 0)
2163 /* If we have one successor, we could have some phi nodes to
2164 translate through. */
2165 else if (single_succ_p (block
))
2167 basic_block succ_bb
= single_succ (block
);
2169 /* We trade iterations of the dataflow equations for having to
2170 phi translate the maximal set, which is incredibly slow
2171 (since the maximal set often has 300+ members, even when you
2172 have a small number of blocks).
2173 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
2175 have a *much* smaller set of values to phi translate once
2176 clean has been run on it.
2177 The cost of doing this is that we technically perform more
	 iterations; however, they are lower-cost iterations.
2180 Timings for PRE on tramp3d-v4:
2181 without maximal set fix: 11 seconds
2182 with maximal set fix/without deferring: 26 seconds
2183 with maximal set fix/with deferring: 11 seconds
2186 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
2190 goto maybe_dump_sets
;
2193 /* If we have multiple successors, we take the intersection of all of
2194 them. Note that in the case of loop exit phi nodes, we may have
2195 phis to translate through. */
2198 VEC(basic_block
, heap
) * worklist
;
2200 basic_block bprime
, first
= NULL
;
2202 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
2203 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2206 && BB_VISITED (e
->dest
))
2208 else if (BB_VISITED (e
->dest
))
2209 VEC_quick_push (basic_block
, worklist
, e
->dest
);
	  /* Of the multiple successors, we have to have visited one already.  */
2215 SET_BIT (changed_blocks
, block
->index
);
2216 BB_VISITED (block
) = 0;
2217 BB_DEFERRED (block
) = 1;
2219 VEC_free (basic_block
, heap
, worklist
);
2220 goto maybe_dump_sets
;
2223 if (!gimple_seq_empty_p (phi_nodes (first
)))
2224 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2226 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
2228 FOR_EACH_VEC_ELT (basic_block
, worklist
, i
, bprime
)
2230 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2232 bitmap_set_t tmp
= bitmap_set_new ();
2233 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2234 bitmap_set_and (ANTIC_OUT
, tmp
);
2235 bitmap_set_free (tmp
);
2238 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2240 VEC_free (basic_block
, heap
, worklist
);
  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);
  /* If ANTIC_IN changed, mark this block and its predecessors so they
     are revisited; otherwise this block has stabilized.  */
  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);
 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	fprintf (dump_file,
		 "Block %d was deferred for a future iteration.\n",
		 block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
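
/* The following is an illustrative, self-contained sketch of the ANTIC_IN
   dataflow documented above compute_antic_aux, not code used by the pass.
   It models expression sets as 64-bit masks, ignores phi translation,
   clean () and the deferral machinery, and iterates the equations to a
   fixpoint in postorder.  All names here (toy_bb, toy_compute_antic) are
   hypothetical.  */

struct toy_bb
{
  int nsuccs;
  int succ[2];			/* Successor block indices.  */
  unsigned long exp_gen;	/* Expressions computed in this block.  */
  unsigned long tmp_gen;	/* Values defined in this block.  */
  unsigned long antic_in;
};

static void
toy_compute_antic (struct toy_bb *bb, int nblocks, const int *postorder)
{
  int changed = 1;
  while (changed)
    {
      int i;
      changed = 0;
      for (i = 0; i < nblocks; i++)
	{
	  struct toy_bb *b = &bb[postorder[i]];
	  unsigned long antic_out = 0, antic_in;
	  int j;

	  /* ANTIC_OUT = intersection of ANTIC_IN over all successors
	     (empty when there are no successors).  */
	  if (b->nsuccs > 0)
	    {
	      antic_out = bb[b->succ[0]].antic_in;
	      for (j = 1; j < b->nsuccs; j++)
		antic_out &= bb[b->succ[j]].antic_in;
	    }

	  /* ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN.  */
	  antic_in = (antic_out | b->exp_gen) & ~b->tmp_gen;
	  if (antic_in != b->antic_in)
	    {
	      b->antic_in = antic_in;
	      changed = 1;
	    }
	}
    }
}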
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])

   A simplified illustrative sketch (toy_compute_partial_antic) follows
   compute_partial_antic_aux below.  */

static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;
2327 /* If any edges from predecessors are abnormal, antic_in is empty,
2329 if (block_has_abnormal_pred_edge
)
2330 goto maybe_dump_sets
;
2332 /* If there are too many partially anticipatable values in the
2333 block, phi_translate_set can take an exponential time: stop
2334 before the translation starts. */
2336 && single_succ_p (block
)
2337 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2338 goto maybe_dump_sets
;
2340 old_PA_IN
= PA_IN (block
);
2341 PA_OUT
= bitmap_set_new ();
2343 /* If the block has no successors, ANTIC_OUT is empty. */
2344 if (EDGE_COUNT (block
->succs
) == 0)
2346 /* If we have one successor, we could have some phi nodes to
2347 translate through. Note that we can't phi translate across DFS
2348 back edges in partial antic, because it uses a union operation on
2349 the successors. For recurrences like IV's, we will end up
2350 generating a new value in the set on each go around (i + 3 (VH.1)
2351 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2352 else if (single_succ_p (block
))
2354 basic_block succ
= single_succ (block
);
2355 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2356 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2358 /* If we have multiple successors, we take the union of all of
2362 VEC(basic_block
, heap
) * worklist
;
2366 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
2367 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2369 if (e
->flags
& EDGE_DFS_BACK
)
2371 VEC_quick_push (basic_block
, worklist
, e
->dest
);
2373 if (VEC_length (basic_block
, worklist
) > 0)
2375 FOR_EACH_VEC_ELT (basic_block
, worklist
, i
, bprime
)
2380 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2381 bitmap_value_insert_into_set (PA_OUT
,
2382 expression_for_id (i
));
2383 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2385 bitmap_set_t pa_in
= bitmap_set_new ();
2386 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2387 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2388 bitmap_value_insert_into_set (PA_OUT
,
2389 expression_for_id (i
));
2390 bitmap_set_free (pa_in
);
2393 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2394 bitmap_value_insert_into_set (PA_OUT
,
2395 expression_for_id (i
));
2398 VEC_free (basic_block
, heap
, worklist
);
2401 /* Prune expressions that are clobbered in block and thus become
2402 invalid if translated from PA_OUT to PA_IN. */
2403 prune_clobbered_mems (PA_OUT
, block
);
2405 /* PA_IN starts with PA_OUT - TMP_GEN.
2406 Then we subtract things from ANTIC_IN. */
2407 PA_IN (block
) = bitmap_set_subtract (PA_OUT
, TMP_GEN (block
));
2409 /* For partial antic, we want to put back in the phi results, since
2410 we will properly avoid making them partially antic over backedges. */
2411 bitmap_ior_into (&PA_IN (block
)->values
, &PHI_GEN (block
)->values
);
2412 bitmap_ior_into (&PA_IN (block
)->expressions
, &PHI_GEN (block
)->expressions
);
2414 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2415 bitmap_set_subtract_values (PA_IN (block
), ANTIC_IN (block
));
2417 dependent_clean (PA_IN (block
), ANTIC_IN (block
), block
);
2419 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2422 SET_BIT (changed_blocks
, block
->index
);
2423 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2424 SET_BIT (changed_blocks
, e
->src
->index
);
2427 RESET_BIT (changed_blocks
, block
->index
);
2430 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2433 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2435 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2438 bitmap_set_free (old_PA_IN
);
2440 bitmap_set_free (PA_OUT
);
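
/* Companion sketch to toy_compute_antic above, again purely illustrative
   and not used by the pass: partial anticipation takes a union over the
   successors instead of an intersection and is then pruned by TMP_GEN and
   ANTIC_IN.  Only a single postorder sweep is shown; the real computation
   iterates, skips DFS back edges, phi-translates, and unions only the
   ANTIC_IN values not already in ANTIC_OUT.  */

static void
toy_compute_partial_antic (struct toy_bb *bb, int nblocks,
			   const int *postorder, unsigned long *pa_in)
{
  int i, j;

  for (i = 0; i < nblocks; i++)
    {
      struct toy_bb *b = &bb[postorder[i]];
      unsigned long pa_out = 0;

      /* PA_OUT = union over successors of PA_IN and ANTIC_IN.  */
      for (j = 0; j < b->nsuccs; j++)
	pa_out |= pa_in[b->succ[j]] | bb[b->succ[j]].antic_in;

      /* PA_IN = PA_OUT - TMP_GEN - ANTIC_IN.  */
      pa_in[postorder[i]] = pa_out & ~b->tmp_gen & ~b->antic_in;
    }
}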
2444 /* Compute ANTIC and partial ANTIC sets. */
2447 compute_antic (void)
2449 bool changed
= true;
2450 int num_iterations
= 0;
2454 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2455 We pre-build the map of blocks with incoming abnormal edges here. */
2456 has_abnormal_preds
= sbitmap_alloc (last_basic_block
);
2457 sbitmap_zero (has_abnormal_preds
);
2464 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2466 e
->flags
&= ~EDGE_DFS_BACK
;
2467 if (e
->flags
& EDGE_ABNORMAL
)
2469 SET_BIT (has_abnormal_preds
, block
->index
);
2474 BB_VISITED (block
) = 0;
2475 BB_DEFERRED (block
) = 0;
2477 /* While we are here, give empty ANTIC_IN sets to each block. */
2478 ANTIC_IN (block
) = bitmap_set_new ();
2479 PA_IN (block
) = bitmap_set_new ();
2482 /* At the exit block we anticipate nothing. */
2483 ANTIC_IN (EXIT_BLOCK_PTR
) = bitmap_set_new ();
2484 BB_VISITED (EXIT_BLOCK_PTR
) = 1;
2485 PA_IN (EXIT_BLOCK_PTR
) = bitmap_set_new ();
2487 changed_blocks
= sbitmap_alloc (last_basic_block
+ 1);
2488 sbitmap_ones (changed_blocks
);
2491 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2492 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2493 /* ??? We need to clear our PHI translation cache here as the
2494 ANTIC sets shrink and we restrict valid translations to
2495 those having operands with leaders in ANTIC. Same below
2496 for PA ANTIC computation. */
2499 for (i
= n_basic_blocks
- NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
2501 if (TEST_BIT (changed_blocks
, postorder
[i
]))
2503 basic_block block
= BASIC_BLOCK (postorder
[i
]);
2504 changed
|= compute_antic_aux (block
,
2505 TEST_BIT (has_abnormal_preds
,
2509 /* Theoretically possible, but *highly* unlikely. */
2510 gcc_checking_assert (num_iterations
< 500);
2513 statistics_histogram_event (cfun
, "compute_antic iterations",
2516 if (do_partial_partial
)
2518 sbitmap_ones (changed_blocks
);
2519 mark_dfs_back_edges ();
2524 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2525 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2528 for (i
= n_basic_blocks
- NUM_FIXED_BLOCKS
- 1 ; i
>= 0; i
--)
2530 if (TEST_BIT (changed_blocks
, postorder
[i
]))
2532 basic_block block
= BASIC_BLOCK (postorder
[i
]);
2534 |= compute_partial_antic_aux (block
,
2535 TEST_BIT (has_abnormal_preds
,
2539 /* Theoretically possible, but *highly* unlikely. */
2540 gcc_checking_assert (num_iterations
< 500);
2542 statistics_histogram_event (cfun
, "compute_partial_antic iterations",
2545 sbitmap_free (has_abnormal_preds
);
2546 sbitmap_free (changed_blocks
);
2550 /* Inserted expressions are placed onto this worklist, which is used
2551 for performing quick dead code elimination of insertions we made
2552 that didn't turn out to be necessary. */
2553 static bitmap inserted_exprs
;
2555 /* The actual worker for create_component_ref_by_pieces. */
2558 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2559 unsigned int *operand
, gimple_seq
*stmts
)
2561 vn_reference_op_t currop
= &VEC_index (vn_reference_op_s
, ref
->operands
,
2565 switch (currop
->opcode
)
2569 tree folded
, sc
= NULL_TREE
;
2570 unsigned int nargs
= 0;
2572 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2575 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2577 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2578 args
= XNEWVEC (tree
, VEC_length (vn_reference_op_s
,
2579 ref
->operands
) - 1);
2580 while (*operand
< VEC_length (vn_reference_op_s
, ref
->operands
))
2582 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2586 folded
= build_call_array (currop
->type
,
2587 (TREE_CODE (fn
) == FUNCTION_DECL
2588 ? build_fold_addr_expr (fn
) : fn
),
2592 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2598 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2600 tree offset
= currop
->op0
;
2601 if (TREE_CODE (baseop
) == ADDR_EXPR
2602 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2606 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2609 offset
= int_const_binop (PLUS_EXPR
, offset
,
2610 build_int_cst (TREE_TYPE (offset
),
2612 baseop
= build_fold_addr_expr (base
);
2614 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2617 case TARGET_MEM_REF
:
2619 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2620 vn_reference_op_t nextop
= &VEC_index (vn_reference_op_s
, ref
->operands
,
2622 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2625 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2627 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2628 return build5 (TARGET_MEM_REF
, currop
->type
,
2629 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2635 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2641 case VIEW_CONVERT_EXPR
:
2643 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
,
2645 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2648 case WITH_SIZE_EXPR
:
2650 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2652 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2653 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2658 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2660 tree op1
= currop
->op0
;
2661 tree op2
= currop
->op1
;
2662 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
2665 /* For array ref vn_reference_op's, operand 1 of the array ref
2666 is op0 of the reference op and operand 3 of the array ref is
2668 case ARRAY_RANGE_REF
:
2672 tree genop1
= currop
->op0
;
2673 tree genop2
= currop
->op1
;
2674 tree genop3
= currop
->op2
;
2675 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
2676 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2679 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2680 /* Drop zero minimum index if redundant. */
2681 if (integer_zerop (genop2
)
2683 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2686 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2690 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
2691 /* We can't always put a size in units of the element alignment
2692 here as the element alignment may be not visible. See
2693 PR43783. Simply drop the element size for constant
2695 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2699 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2700 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2701 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2704 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2711 tree genop2
= currop
->op1
;
2712 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
	/* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves.  */
2716 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2717 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2722 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
/* Find a leader for an expression, or generate one using
   create_expression_by_pieces if it's ANTIC but
   complex.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   STMTS is the statement list to put the inserted expressions on.
   Returns the SSA_NAME of the LHS of the generated expression or the
   leader.
   DOMSTMT if non-NULL is a statement that should be dominated by
   all uses in the generated expression.  If DOMSTMT is non-NULL this
   routine can fail and return NULL_TREE.  Otherwise it will assert
   on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
2779 pre_expr expr
= get_or_alloc_expr_for (op
);
2780 unsigned int lookfor
= get_expr_value_id (expr
);
2781 pre_expr leader
= bitmap_find_leader (AVAIL_OUT (block
), lookfor
);
2784 if (leader
->kind
== NAME
)
2785 return PRE_EXPR_NAME (leader
);
2786 else if (leader
->kind
== CONSTANT
)
2787 return PRE_EXPR_CONSTANT (leader
);
2790 /* It must be a complex expression, so generate it recursively. */
2791 bitmap exprset
= VEC_index (bitmap
, value_expressions
, lookfor
);
2794 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
2796 pre_expr temp
= expression_for_id (i
);
2797 if (temp
->kind
!= NAME
)
2798 return create_expression_by_pieces (block
, temp
, stmts
,
2799 get_expr_type (expr
));
2805 #define NECESSARY GF_PLF_1
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   If DOMSTMT is non-NULL then we make sure that all uses in the
   expressions dominate that statement.  In this case the function
   can return NULL_TREE to signal failure.  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
2831 gimple_seq forced_stmts
= NULL
;
2832 unsigned int value_id
;
2833 gimple_stmt_iterator gsi
;
2834 tree exprtype
= type
? type
: get_expr_type (expr
);
2840 /* We may hit the NAME/CONSTANT case if we have to convert types
2841 that value numbering saw through. */
2843 folded
= PRE_EXPR_NAME (expr
);
2846 folded
= PRE_EXPR_CONSTANT (expr
);
2850 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2851 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2856 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2859 for (i
= 0; i
< nary
->length
; ++i
)
2861 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2862 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2863 may have conversions stripped. */
2864 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2867 genop
[i
] = fold_convert (nary
->type
, genop
[i
]);
2869 genop
[i
] = convert_to_ptrofftype (genop
[i
]);
2872 genop
[i
] = fold_convert (TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2874 if (nary
->opcode
== CONSTRUCTOR
)
2876 VEC(constructor_elt
,gc
) *elts
= NULL
;
2877 for (i
= 0; i
< nary
->length
; ++i
)
2878 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2879 folded
= build_constructor (nary
->type
, elts
);
2883 switch (nary
->length
)
2886 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2890 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2891 genop
[0], genop
[1]);
2894 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2895 genop
[0], genop
[1], genop
[3]);
2907 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2908 folded
= fold_convert (exprtype
, folded
);
2910 /* Force the generated expression to be a sequence of GIMPLE
2912 We have to call unshare_expr because force_gimple_operand may
2913 modify the tree we pass to it. */
2914 folded
= force_gimple_operand (unshare_expr (folded
), &forced_stmts
,
2917 /* If we have any intermediate expressions to the value sets, add them
2918 to the value sets and chain them in the instruction stream. */
2921 gsi
= gsi_start (forced_stmts
);
2922 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2924 gimple stmt
= gsi_stmt (gsi
);
2925 tree forcedname
= gimple_get_lhs (stmt
);
2928 if (TREE_CODE (forcedname
) == SSA_NAME
)
2930 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2931 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2932 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2933 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2934 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2935 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2936 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2939 gimple_seq_add_seq (stmts
, forced_stmts
);
2942 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2943 newstmt
= gimple_build_assign (name
, folded
);
2944 gimple_set_plf (newstmt
, NECESSARY
, false);
2946 gimple_seq_add_stmt (stmts
, newstmt
);
2947 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2949 /* Fold the last statement. */
2950 gsi
= gsi_last (*stmts
);
2951 if (fold_stmt_inplace (&gsi
))
2952 update_stmt (gsi_stmt (gsi
));
2954 /* Add a value number to the temporary.
2955 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2956 we are creating the expression by pieces, and this particular piece of
2957 the expression may have been represented. There is no harm in replacing
2959 value_id
= get_expr_value_id (expr
);
2960 VN_INFO_GET (name
)->value_id
= value_id
;
2961 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2962 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2963 VN_INFO (name
)->valnum
= name
;
2964 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2965 nameexpr
= get_or_alloc_expr_for_name (name
);
2966 add_to_value (value_id
, nameexpr
);
2967 if (NEW_SETS (block
))
2968 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2969 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2971 pre_stats
.insertions
++;
2972 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2974 fprintf (dump_file
, "Inserted ");
2975 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
2976 fprintf (dump_file
, " in predecessor %d\n", block
->index
);
2983 /* Returns true if we want to inhibit the insertions of PHI nodes
2984 for the given EXPR for basic block BB (a member of a loop).
2985 We want to do this, when we fear that the induction variable we
2986 create might inhibit vectorization. */
2989 inhibit_phi_insertion (basic_block bb
, pre_expr expr
)
2991 vn_reference_t vr
= PRE_EXPR_REFERENCE (expr
);
2992 VEC (vn_reference_op_s
, heap
) *ops
= vr
->operands
;
2993 vn_reference_op_t op
;
2996 /* If we aren't going to vectorize we don't inhibit anything. */
2997 if (!flag_tree_vectorize
)
3000 /* Otherwise we inhibit the insertion when the address of the
3001 memory reference is a simple induction variable. In other
3002 cases the vectorizer won't do anything anyway (either it's
3003 loop invariant or a complicated expression). */
3004 FOR_EACH_VEC_ELT (vn_reference_op_s
, ops
, i
, op
)
3009 /* Calls are not a problem. */
3013 case ARRAY_RANGE_REF
:
3014 if (TREE_CODE (op
->op0
) != SSA_NAME
)
3019 basic_block defbb
= gimple_bb (SSA_NAME_DEF_STMT (op
->op0
));
3021 /* Default defs are loop invariant. */
3024 /* Defined outside this loop, also loop invariant. */
3025 if (!flow_bb_inside_loop_p (bb
->loop_father
, defbb
))
3027 /* If it's a simple induction variable inhibit insertion,
3028 the vectorizer might be interested in this one. */
3029 if (simple_iv (bb
->loop_father
, bb
->loop_father
,
3030 op
->op0
, &iv
, true))
3032 /* No simple IV, vectorizer can't do anything, hence no
3033 reason to inhibit the transformation for this operand. */
3043 /* Insert the to-be-made-available values of expression EXPRNUM for each
3044 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3045 merge the result with a phi node, given the same value number as
3046 NODE. Return true if we have inserted new stuff. */
3049 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
3050 VEC(pre_expr
, heap
) *avail
)
3052 pre_expr expr
= expression_for_id (exprnum
);
3054 unsigned int val
= get_expr_value_id (expr
);
3056 bool insertions
= false;
3061 tree type
= get_expr_type (expr
);
3065 /* Make sure we aren't creating an induction variable. */
3066 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
3068 bool firstinsideloop
= false;
3069 bool secondinsideloop
= false;
3070 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3071 EDGE_PRED (block
, 0)->src
);
3072 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3073 EDGE_PRED (block
, 1)->src
);
3074 /* Induction variables only have one edge inside the loop. */
3075 if ((firstinsideloop
^ secondinsideloop
)
3076 && (expr
->kind
!= REFERENCE
3077 || inhibit_phi_insertion (block
, expr
)))
3079 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3080 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3085 /* Make the necessary insertions. */
3086 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3088 gimple_seq stmts
= NULL
;
3091 eprime
= VEC_index (pre_expr
, avail
, pred
->dest_idx
);
3093 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3095 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3097 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3098 gsi_insert_seq_on_edge (pred
, stmts
);
3099 VEC_replace (pre_expr
, avail
, pred
->dest_idx
,
3100 get_or_alloc_expr_for_name (builtexpr
));
3103 else if (eprime
->kind
== CONSTANT
)
3105 /* Constants may not have the right type, fold_convert
3106 should give us back a constant with the right type. */
3107 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3108 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3110 tree builtexpr
= fold_convert (type
, constant
);
3111 if (!is_gimple_min_invariant (builtexpr
))
3113 tree forcedexpr
= force_gimple_operand (builtexpr
,
3116 if (!is_gimple_min_invariant (forcedexpr
))
3118 if (forcedexpr
!= builtexpr
)
3120 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3121 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3125 gimple_stmt_iterator gsi
;
3126 gsi
= gsi_start (stmts
);
3127 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3129 gimple stmt
= gsi_stmt (gsi
);
3130 tree lhs
= gimple_get_lhs (stmt
);
3131 if (TREE_CODE (lhs
) == SSA_NAME
)
3132 bitmap_set_bit (inserted_exprs
,
3133 SSA_NAME_VERSION (lhs
));
3134 gimple_set_plf (stmt
, NECESSARY
, false);
3136 gsi_insert_seq_on_edge (pred
, stmts
);
3138 VEC_replace (pre_expr
, avail
, pred
->dest_idx
,
3139 get_or_alloc_expr_for_name (forcedexpr
));
3143 VEC_replace (pre_expr
, avail
, pred
->dest_idx
,
3144 get_or_alloc_expr_for_constant (builtexpr
));
3147 else if (eprime
->kind
== NAME
)
3149 /* We may have to do a conversion because our value
3150 numbering can look through types in certain cases, but
3151 our IL requires all operands of a phi node have the same
3153 tree name
= PRE_EXPR_NAME (eprime
);
3154 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3158 builtexpr
= fold_convert (type
, name
);
3159 forcedexpr
= force_gimple_operand (builtexpr
,
3163 if (forcedexpr
!= name
)
3165 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3166 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3171 gimple_stmt_iterator gsi
;
3172 gsi
= gsi_start (stmts
);
3173 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3175 gimple stmt
= gsi_stmt (gsi
);
3176 tree lhs
= gimple_get_lhs (stmt
);
3177 if (TREE_CODE (lhs
) == SSA_NAME
)
3178 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3179 gimple_set_plf (stmt
, NECESSARY
, false);
3181 gsi_insert_seq_on_edge (pred
, stmts
);
3183 VEC_replace (pre_expr
, avail
, pred
->dest_idx
,
3184 get_or_alloc_expr_for_name (forcedexpr
));
3188 /* If we didn't want a phi node, and we made insertions, we still have
3189 inserted new stuff, and thus return true. If we didn't want a phi node,
3190 and didn't make insertions, we haven't added anything new, so return
3192 if (nophi
&& insertions
)
3194 else if (nophi
&& !insertions
)
3197 /* Now build a phi for the new variable. */
3198 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3199 phi
= create_phi_node (temp
, block
);
3201 gimple_set_plf (phi
, NECESSARY
, false);
3202 VN_INFO_GET (temp
)->value_id
= val
;
3203 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3204 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3205 VN_INFO (temp
)->valnum
= temp
;
3206 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3207 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3209 pre_expr ae
= VEC_index (pre_expr
, avail
, pred
->dest_idx
);
3210 gcc_assert (get_expr_type (ae
) == type
3211 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3212 if (ae
->kind
== CONSTANT
)
3213 add_phi_arg (phi
, PRE_EXPR_CONSTANT (ae
), pred
, UNKNOWN_LOCATION
);
3215 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3218 newphi
= get_or_alloc_expr_for_name (temp
);
3219 add_to_value (val
, newphi
);
3221 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3222 this insertion, since we test for the existence of this value in PHI_GEN
3223 before proceeding with the partial redundancy checks in insert_aux.
3225 The value may exist in AVAIL_OUT, in particular, it could be represented
3226 by the expression we are trying to eliminate, in which case we want the
3227 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3230 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3231 this block, because if it did, it would have existed in our dominator's
3232 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3235 bitmap_insert_into_set (PHI_GEN (block
), newphi
);
3236 bitmap_value_replace_in_set (AVAIL_OUT (block
),
3238 bitmap_insert_into_set (NEW_SETS (block
),
3241 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3243 fprintf (dump_file
, "Created phi ");
3244 print_gimple_stmt (dump_file
, phi
, 0, 0);
3245 fprintf (dump_file
, " in block %d\n", block
->index
);
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_partial_insertion.

   A toy model of the per-edge availability test used in step 2a/2b
   (toy_partially_redundant_p) follows do_regular_insertion below.  */

static bool
do_regular_insertion (basic_block block, basic_block dom)
{
3274 bool new_stuff
= false;
3275 VEC (pre_expr
, heap
) *exprs
;
3277 VEC (pre_expr
, heap
) *avail
= NULL
;
3280 exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
3281 VEC_safe_grow (pre_expr
, heap
, avail
, EDGE_COUNT (block
->preds
));
3283 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
3285 if (expr
->kind
!= NAME
)
3288 bool by_some
= false;
3289 bool cant_insert
= false;
3290 bool all_same
= true;
3291 pre_expr first_s
= NULL
;
3294 pre_expr eprime
= NULL
;
3296 pre_expr edoubleprime
= NULL
;
3297 bool do_insertion
= false;
3299 val
= get_expr_value_id (expr
);
3300 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3302 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3304 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3305 fprintf (dump_file
, "Found fully redundant value\n");
3309 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3311 unsigned int vprime
;
3313 /* We should never run insertion for the exit block
3314 and so not come across fake pred edges. */
3315 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3317 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3320 /* eprime will generally only be NULL if the
3321 value of the expression, translated
3322 through the PHI for this predecessor, is
3323 undefined. If that is the case, we can't
3324 make the expression fully redundant,
3325 because its value is undefined along a
3326 predecessor path. We can thus break out
3327 early because it doesn't matter what the
3328 rest of the results are. */
3331 VEC_replace (pre_expr
, avail
, pred
->dest_idx
, NULL
);
3336 eprime
= fully_constant_expression (eprime
);
3337 vprime
= get_expr_value_id (eprime
);
3338 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3340 if (edoubleprime
== NULL
)
3342 VEC_replace (pre_expr
, avail
, pred
->dest_idx
, eprime
);
3347 VEC_replace (pre_expr
, avail
, pred
->dest_idx
, edoubleprime
);
3349 /* We want to perform insertions to remove a redundancy on
3350 a path in the CFG we want to optimize for speed. */
3351 if (optimize_edge_for_speed_p (pred
))
3352 do_insertion
= true;
3353 if (first_s
== NULL
)
3354 first_s
= edoubleprime
;
3355 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3359 /* If we can insert it, it's not the same value
3360 already existing along every predecessor, and
3361 it's defined by some predecessor, it is
3362 partially redundant. */
3363 if (!cant_insert
&& !all_same
&& by_some
)
3367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3369 fprintf (dump_file
, "Skipping partial redundancy for "
3371 print_pre_expr (dump_file
, expr
);
3372 fprintf (dump_file
, " (%04d), no redundancy on to be "
3373 "optimized for speed edge\n", val
);
3376 else if (dbg_cnt (treepre_insert
))
3378 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3380 fprintf (dump_file
, "Found partial redundancy for "
3382 print_pre_expr (dump_file
, expr
);
3383 fprintf (dump_file
, " (%04d)\n",
3384 get_expr_value_id (expr
));
3386 if (insert_into_preds_of_block (block
,
3387 get_expression_id (expr
),
3392 /* If all edges produce the same value and that value is
3393 an invariant, then the PHI has the same value on all
3394 edges. Note this. */
3395 else if (!cant_insert
&& all_same
&& eprime
3396 && (edoubleprime
->kind
== CONSTANT
3397 || edoubleprime
->kind
== NAME
)
3398 && !value_id_constant_p (val
))
3402 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
3404 unsigned int new_val
= get_expr_value_id (edoubleprime
);
3405 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bi
)
3407 pre_expr expr
= expression_for_id (j
);
3409 if (expr
->kind
== NAME
)
3411 vn_ssa_aux_t info
= VN_INFO (PRE_EXPR_NAME (expr
));
3412 /* Just reset the value id and valnum so it is
3413 the same as the constant we have discovered. */
3414 if (edoubleprime
->kind
== CONSTANT
)
3416 info
->valnum
= PRE_EXPR_CONSTANT (edoubleprime
);
3417 pre_stats
.constified
++;
3420 info
->valnum
= VN_INFO (PRE_EXPR_NAME (edoubleprime
))->valnum
;
3421 info
->value_id
= new_val
;
3428 VEC_free (pre_expr
, heap
, exprs
);
3429 VEC_free (pre_expr
, heap
, avail
);
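
/* Illustrative sketch only: the heart of the per-edge test used by
   do_regular_insertion above.  For a value (represented as a single bit)
   and the AVAIL_OUT sets of the predecessors (as bit masks), the value is
   partially redundant when it is available along some but not all incoming
   edges.  The names here are hypothetical; the real code additionally
   phi-translates the expression per edge and tracks whether all leaders
   are the same value.  */

static int
toy_partially_redundant_p (const unsigned long *avail_out_pred, int npreds,
			   unsigned long val_bit)
{
  int i, by_some = 0, by_all = 1;

  for (i = 0; i < npreds; i++)
    {
      if (avail_out_pred[i] & val_bit)
	by_some = 1;		/* Available along this edge.  */
      else
	by_all = 0;		/* Missing along this edge.  */
    }

  /* Fully redundant values (by_all) are left to elimination; values
     available nowhere cannot be made redundant by insertion.  */
  return by_some && !by_all;
}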
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
3444 bool new_stuff
= false;
3445 VEC (pre_expr
, heap
) *exprs
;
3447 VEC (pre_expr
, heap
) *avail
= NULL
;
3450 exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
3451 VEC_safe_grow (pre_expr
, heap
, avail
, EDGE_COUNT (block
->preds
));
3453 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
3455 if (expr
->kind
!= NAME
)
3459 bool cant_insert
= false;
3462 pre_expr eprime
= NULL
;
3465 val
= get_expr_value_id (expr
);
3466 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3468 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3471 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3473 unsigned int vprime
;
3474 pre_expr edoubleprime
;
3476 /* We should never run insertion for the exit block
3477 and so not come across fake pred edges. */
3478 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3480 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3484 /* eprime will generally only be NULL if the
3485 value of the expression, translated
3486 through the PHI for this predecessor, is
3487 undefined. If that is the case, we can't
3488 make the expression fully redundant,
3489 because its value is undefined along a
3490 predecessor path. We can thus break out
3491 early because it doesn't matter what the
3492 rest of the results are. */
3495 VEC_replace (pre_expr
, avail
, pred
->dest_idx
, NULL
);
3500 eprime
= fully_constant_expression (eprime
);
3501 vprime
= get_expr_value_id (eprime
);
3502 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3503 VEC_replace (pre_expr
, avail
, pred
->dest_idx
, edoubleprime
);
3504 if (edoubleprime
== NULL
)
3511 /* If we can insert it, it's not the same value
3512 already existing along every predecessor, and
3513 it's defined by some predecessor, it is
3514 partially redundant. */
3515 if (!cant_insert
&& by_all
)
3518 bool do_insertion
= false;
3520 /* Insert only if we can remove a later expression on a path
3521 that we want to optimize for speed.
3522 The phi node that we will be inserting in BLOCK is not free,
3523 and inserting it for the sake of !optimize_for_speed successor
3524 may cause regressions on the speed path. */
3525 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3527 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
))
3529 if (optimize_edge_for_speed_p (succ
))
3530 do_insertion
= true;
3536 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3538 fprintf (dump_file
, "Skipping partial partial redundancy "
3540 print_pre_expr (dump_file
, expr
);
3541 fprintf (dump_file
, " (%04d), not partially anticipated "
3542 "on any to be optimized for speed edges\n", val
);
3545 else if (dbg_cnt (treepre_insert
))
3547 pre_stats
.pa_insert
++;
3548 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3550 fprintf (dump_file
, "Found partial partial redundancy "
3552 print_pre_expr (dump_file
, expr
);
3553 fprintf (dump_file
, " (%04d)\n",
3554 get_expr_value_id (expr
));
3556 if (insert_into_preds_of_block (block
,
3557 get_expression_id (expr
),
3565 VEC_free (pre_expr
, heap
, exprs
);
3566 VEC_free (pre_expr
, heap
, avail
);
3571 insert_aux (basic_block block
)
3574 bool new_stuff
= false;
3579 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3584 bitmap_set_t newset
= NEW_SETS (dom
);
3587 /* Note that we need to value_replace both NEW_SETS, and
3588 AVAIL_OUT. For both the case of NEW_SETS, the value may be
3589 represented by some non-simple expression here that we want
3590 to replace it with. */
3591 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3593 pre_expr expr
= expression_for_id (i
);
3594 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3595 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3598 if (!single_pred_p (block
))
3600 new_stuff
|= do_regular_insertion (block
, dom
);
3601 if (do_partial_partial
)
3602 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3606 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3608 son
= next_dom_son (CDI_DOMINATORS
, son
))
3610 new_stuff
|= insert_aux (son
);
3616 /* Perform insertion of partially redundant values. */
3621 bool new_stuff
= true;
3623 int num_iterations
= 0;
3626 NEW_SETS (bb
) = bitmap_set_new ();
3631 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3632 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3633 new_stuff
= insert_aux (ENTRY_BLOCK_PTR
);
3635 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
3639 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */
3642 add_to_exp_gen (basic_block block
, tree op
)
3646 if (TREE_CODE (op
) == SSA_NAME
&& ssa_undefined_value_p (op
))
3649 result
= get_or_alloc_expr_for_name (op
);
3650 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3653 /* Create value ids for PHI in BLOCK. */
3656 make_values_for_phi (gimple phi
, basic_block block
)
3658 tree result
= gimple_phi_result (phi
);
3661 /* We have no need for virtual phis, as they don't represent
3662 actual computations. */
3663 if (virtual_operand_p (result
))
3666 pre_expr e
= get_or_alloc_expr_for_name (result
);
3667 add_to_value (get_expr_value_id (e
), e
);
3668 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3669 bitmap_insert_into_set (PHI_GEN (block
), e
);
3670 for (i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
3672 tree arg
= gimple_phi_arg_def (phi
, i
);
3673 if (TREE_CODE (arg
) == SSA_NAME
)
3675 e
= get_or_alloc_expr_for_name (arg
);
3676 add_to_value (get_expr_value_id (e
), e
);
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].

   A self-contained illustrative sketch of this recurrence
   (toy_compute_avail) follows compute_avail below.  */

static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
3700 /* We pretend that default definitions are defined in the entry block.
3701 This includes function arguments and the static chain decl. */
3702 for (i
= 1; i
< num_ssa_names
; ++i
)
3704 tree name
= ssa_name (i
);
3707 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3708 || has_zero_uses (name
)
3709 || virtual_operand_p (name
))
3712 e
= get_or_alloc_expr_for_name (name
);
3713 add_to_value (get_expr_value_id (e
), e
);
3714 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), e
);
3715 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), e
);
3718 /* Allocate the worklist. */
3719 worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
3721 /* Seed the algorithm by putting the dominator children of the entry
3722 block on the worklist. */
3723 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR
);
3725 son
= next_dom_son (CDI_DOMINATORS
, son
))
3726 worklist
[sp
++] = son
;
3728 /* Loop until the worklist is empty. */
3731 gimple_stmt_iterator gsi
;
3735 /* Pick a block from the worklist. */
3736 block
= worklist
[--sp
];
3738 /* Initially, the set of available values in BLOCK is that of
3739 its immediate dominator. */
3740 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3742 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3744 /* Generate values for PHI nodes. */
3745 for (gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3746 make_values_for_phi (gsi_stmt (gsi
), block
);
3748 BB_MAY_NOTRETURN (block
) = 0;
3750 /* Now compute value numbers and populate value sets with all
3751 the expressions computed in BLOCK. */
3752 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3757 stmt
= gsi_stmt (gsi
);
3759 /* Cache whether the basic-block has any non-visible side-effect
3761 If this isn't a call or it is the last stmt in the
3762 basic-block then the CFG represents things correctly. */
3763 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
3765 /* Non-looping const functions always return normally.
3766 Otherwise the call might not return or have side-effects
3767 that forbids hoisting possibly trapping expressions
3769 int flags
= gimple_call_flags (stmt
);
3770 if (!(flags
& ECF_CONST
)
3771 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3772 BB_MAY_NOTRETURN (block
) = 1;
3775 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3777 pre_expr e
= get_or_alloc_expr_for_name (op
);
3779 add_to_value (get_expr_value_id (e
), e
);
3780 bitmap_insert_into_set (TMP_GEN (block
), e
);
3781 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3784 if (gimple_has_side_effects (stmt
)
3785 || stmt_could_throw_p (stmt
)
3786 || is_gimple_debug (stmt
))
3789 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3790 add_to_exp_gen (block
, op
);
3792 switch (gimple_code (stmt
))
3800 pre_expr result
= NULL
;
3801 VEC(vn_reference_op_s
, heap
) *ops
= NULL
;
3803 /* We can value number only calls to real functions. */
3804 if (gimple_call_internal_p (stmt
))
3807 copy_reference_ops_from_call (stmt
, &ops
);
3808 vn_reference_lookup_pieces (gimple_vuse (stmt
), 0,
3809 gimple_expr_type (stmt
),
3810 ops
, &ref
, VN_NOWALK
);
3811 VEC_free (vn_reference_op_s
, heap
, ops
);
3815 /* If the value of the call is not invalidated in
3816 this block until it is computed, add the expression
3818 if (!gimple_vuse (stmt
)
3820 (SSA_NAME_DEF_STMT (gimple_vuse (stmt
))) == GIMPLE_PHI
3821 || gimple_bb (SSA_NAME_DEF_STMT
3822 (gimple_vuse (stmt
))) != block
)
3824 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3825 result
->kind
= REFERENCE
;
3827 PRE_EXPR_REFERENCE (result
) = ref
;
3829 get_or_alloc_expression_id (result
);
3830 add_to_value (get_expr_value_id (result
), result
);
3831 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3838 pre_expr result
= NULL
;
3839 switch (vn_get_stmt_kind (stmt
))
3843 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3846 /* COND_EXPR and VEC_COND_EXPR are awkward in
3847 that they contain an embedded complex expression.
3848 Don't even try to shove those through PRE. */
3849 if (code
== COND_EXPR
3850 || code
== VEC_COND_EXPR
)
3853 vn_nary_op_lookup_pieces (gimple_num_ops (stmt
) - 1,
3855 gimple_expr_type (stmt
),
3856 gimple_assign_rhs1_ptr (stmt
),
3861 /* If the NARY traps and there was a preceding
3862 point in the block that might not return avoid
3863 adding the nary to EXP_GEN. */
3864 if (BB_MAY_NOTRETURN (block
)
3865 && vn_nary_may_trap (nary
))
3868 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3869 result
->kind
= NARY
;
3871 PRE_EXPR_NARY (result
) = nary
;
3878 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
3884 /* If the value of the reference is not invalidated in
3885 this block until it is computed, add the expression
3887 if (gimple_vuse (stmt
))
3891 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3892 while (!gimple_nop_p (def_stmt
)
3893 && gimple_code (def_stmt
) != GIMPLE_PHI
3894 && gimple_bb (def_stmt
) == block
)
3896 if (stmt_may_clobber_ref_p
3897 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3903 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3909 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3910 result
->kind
= REFERENCE
;
3912 PRE_EXPR_REFERENCE (result
) = ref
;
3920 get_or_alloc_expression_id (result
);
3921 add_to_value (get_expr_value_id (result
), result
);
3922 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3930 /* Put the dominator children of BLOCK on the worklist of blocks
3931 to compute available sets for. */
3932 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3934 son
= next_dom_son (CDI_DOMINATORS
, son
))
3935 worklist
[sp
++] = son
;
3942 /* Local state for the eliminate domwalk. */
3943 static VEC (gimple
, heap
) *el_to_remove
;
3944 static VEC (gimple
, heap
) *el_to_update
;
3945 static unsigned int el_todo
;
3946 static VEC (tree
, heap
) *el_avail
;
3947 static VEC (tree
, heap
) *el_avail_stack
;
3949 /* Return a leader for OP that is available at the current point of the
3950 eliminate domwalk. */
3953 eliminate_avail (tree op
)
3955 tree valnum
= VN_INFO (op
)->valnum
;
3956 if (TREE_CODE (valnum
) == SSA_NAME
)
3958 if (SSA_NAME_IS_DEFAULT_DEF (valnum
))
3960 if (VEC_length (tree
, el_avail
) > SSA_NAME_VERSION (valnum
))
3961 return VEC_index (tree
, el_avail
, SSA_NAME_VERSION (valnum
));
3963 else if (is_gimple_min_invariant (valnum
))
3968 /* At the current point of the eliminate domwalk make OP available. */
3971 eliminate_push_avail (tree op
)
3973 tree valnum
= VN_INFO (op
)->valnum
;
3974 if (TREE_CODE (valnum
) == SSA_NAME
)
3976 if (VEC_length (tree
, el_avail
) <= SSA_NAME_VERSION (valnum
))
3977 VEC_safe_grow_cleared (tree
, heap
,
3978 el_avail
, SSA_NAME_VERSION (valnum
) + 1);
3979 VEC_replace (tree
, el_avail
, SSA_NAME_VERSION (valnum
), op
);
3980 VEC_safe_push (tree
, heap
, el_avail_stack
, op
);
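
/* Illustrative sketch only of the leader tracking used by the eliminate
   domwalk (eliminate_push_avail above, eliminate_leave_block below): a
   value-number-indexed table of the current leader plus an unwind stack,
   with 0 as the per-block marker pushed on entering a block and popped
   back to when leaving it.  Sizes, the use of plain ints for names, and
   all toy_* identifiers are hypothetical; valid value numbers and leaders
   start at 1 in this model.  */

#define TOY_NVALUES 64

static int toy_el_avail[TOY_NVALUES];	/* value number -> leader.  */
static int toy_el_stack[4 * TOY_NVALUES];
static int toy_el_sp;

static void
toy_el_enter_block (void)
{
  toy_el_stack[toy_el_sp++] = 0;	/* Per-block marker.  */
}

static void
toy_el_push_avail (int value_num, int leader)
{
  toy_el_avail[value_num] = leader;
  toy_el_stack[toy_el_sp++] = value_num;
}

static void
toy_el_leave_block (void)
{
  int v;

  /* Leaders recorded in this block go out of scope again.  */
  while ((v = toy_el_stack[--toy_el_sp]) != 0)
    toy_el_avail[v] = 0;
}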
3984 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3985 the leader for the expression if insertion was successful. */
3988 eliminate_insert (gimple_stmt_iterator
*gsi
, tree val
)
3990 tree expr
= vn_get_expr_for (val
);
3991 if (!CONVERT_EXPR_P (expr
)
3992 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
)
3995 tree op
= TREE_OPERAND (expr
, 0);
3996 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (op
) : op
;
4000 tree res
= make_temp_ssa_name (TREE_TYPE (val
), NULL
, "pretmp");
4001 gimple tem
= gimple_build_assign (res
,
4002 build1 (TREE_CODE (expr
),
4003 TREE_TYPE (expr
), leader
));
4004 gsi_insert_before (gsi
, tem
, GSI_SAME_STMT
);
4005 VN_INFO_GET (res
)->valnum
= val
;
4007 if (TREE_CODE (leader
) == SSA_NAME
)
4008 gimple_set_plf (SSA_NAME_DEF_STMT (leader
), NECESSARY
, true);
4010 pre_stats
.insertions
++;
4011 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4013 fprintf (dump_file
, "Inserted ");
4014 print_gimple_stmt (dump_file
, tem
, 0, 0);
4020 /* Perform elimination for the basic-block B during the domwalk. */
4023 eliminate_bb (dom_walk_data
*, basic_block b
)
4025 gimple_stmt_iterator gsi
;
4029 VEC_safe_push (tree
, heap
, el_avail_stack
, NULL_TREE
);
4031 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4033 gimple stmt
, phi
= gsi_stmt (gsi
);
4034 tree sprime
= NULL_TREE
, res
= PHI_RESULT (phi
);
4035 gimple_stmt_iterator gsi2
;
4037 /* We want to perform redundant PHI elimination. Do so by
4038 replacing the PHI with a single copy if possible.
4039 Do not touch inserted, single-argument or virtual PHIs. */
4040 if (gimple_phi_num_args (phi
) == 1
4041 || virtual_operand_p (res
))
4047 sprime
= eliminate_avail (res
);
4051 eliminate_push_avail (res
);
4055 else if (is_gimple_min_invariant (sprime
))
4057 if (!useless_type_conversion_p (TREE_TYPE (res
),
4058 TREE_TYPE (sprime
)))
4059 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4062 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4064 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4065 print_generic_expr (dump_file
, res
, 0);
4066 fprintf (dump_file
, " with ");
4067 print_generic_expr (dump_file
, sprime
, 0);
4068 fprintf (dump_file
, "\n");
4071 remove_phi_node (&gsi
, false);
4074 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4075 && TREE_CODE (sprime
) == SSA_NAME
)
4076 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4078 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4079 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4080 stmt
= gimple_build_assign (res
, sprime
);
4081 SSA_NAME_DEF_STMT (res
) = stmt
;
4082 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4084 gsi2
= gsi_after_labels (b
);
4085 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4086 /* Queue the copy for eventual removal. */
4087 VEC_safe_push (gimple
, heap
, el_to_remove
, stmt
);
4088 /* If we inserted this PHI node ourself, it's not an elimination. */
4090 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4093 pre_stats
.eliminations
++;
4096 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4098 tree lhs
= NULL_TREE
;
4099 tree rhs
= NULL_TREE
;
4101 stmt
= gsi_stmt (gsi
);
4103 if (gimple_has_lhs (stmt
))
4104 lhs
= gimple_get_lhs (stmt
);
4106 if (gimple_assign_single_p (stmt
))
4107 rhs
= gimple_assign_rhs1 (stmt
);
	  /* Lookup the RHS of the expression, see if we have an
	     available computation for it.  If so, replace the RHS with
	     the available computation.

	     We don't replace a global register variable when it is the RHS of
	     a single assign.  We do replace local register variables since gcc
	     does not guarantee a local variable will be allocated in a register.  */
4117 if (gimple_has_lhs (stmt
)
4118 && TREE_CODE (lhs
) == SSA_NAME
4119 && !gimple_assign_ssa_name_copy_p (stmt
)
4120 && (!gimple_assign_single_p (stmt
)
4121 || (!is_gimple_min_invariant (rhs
)
4122 && (gimple_assign_rhs_code (stmt
) != VAR_DECL
4123 || !is_global_var (rhs
)
4124 || !DECL_HARD_REGISTER (rhs
))))
4125 && !gimple_has_volatile_ops (stmt
))
4128 gimple orig_stmt
= stmt
;
4130 sprime
= eliminate_avail (lhs
);
4133 /* If there is no existing usable leader but SCCVN thinks
4134 it has an expression it wants to use as replacement,
4136 tree val
= VN_INFO (lhs
)->valnum
;
4138 && TREE_CODE (val
) == SSA_NAME
4139 && VN_INFO (val
)->needs_insertion
4140 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4141 eliminate_push_avail (sprime
);
4143 else if (is_gimple_min_invariant (sprime
))
4145 /* If there is no existing leader but SCCVN knows this
4146 value is constant, use that constant. */
4147 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4148 TREE_TYPE (sprime
)))
4149 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4151 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4153 fprintf (dump_file
, "Replaced ");
4154 print_gimple_expr (dump_file
, stmt
, 0, 0);
4155 fprintf (dump_file
, " with ");
4156 print_generic_expr (dump_file
, sprime
, 0);
4157 fprintf (dump_file
, " in ");
4158 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4160 pre_stats
.eliminations
++;
4161 propagate_tree_value_into_stmt (&gsi
, sprime
);
4162 stmt
= gsi_stmt (gsi
);
4165 /* If we removed EH side-effects from the statement, clean
4166 its EH information. */
4167 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4169 bitmap_set_bit (need_eh_cleanup
,
4170 gimple_bb (stmt
)->index
);
4171 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4172 fprintf (dump_file
, " Removed EH side-effects.\n");
4177 /* If there is no usable leader mark lhs as leader for its value. */
4179 eliminate_push_avail (lhs
);
4183 && (rhs
== NULL_TREE
4184 || TREE_CODE (rhs
) != SSA_NAME
4185 || may_propagate_copy (rhs
, sprime
)))
4187 bool can_make_abnormal_goto
4188 = is_gimple_call (stmt
)
4189 && stmt_can_make_abnormal_goto (stmt
);
4191 gcc_assert (sprime
!= rhs
);
4193 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4195 fprintf (dump_file
, "Replaced ");
4196 print_gimple_expr (dump_file
, stmt
, 0, 0);
4197 fprintf (dump_file
, " with ");
4198 print_generic_expr (dump_file
, sprime
, 0);
4199 fprintf (dump_file
, " in ");
4200 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4203 if (TREE_CODE (sprime
) == SSA_NAME
)
4204 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4206 /* We need to make sure the new and old types actually match,
4207 which may require adding a simple cast, which fold_convert
4209 if ((!rhs
|| TREE_CODE (rhs
) != SSA_NAME
)
4210 && !useless_type_conversion_p (gimple_expr_type (stmt
),
4211 TREE_TYPE (sprime
)))
4212 sprime
= fold_convert (gimple_expr_type (stmt
), sprime
);
4214 pre_stats
.eliminations
++;
4215 propagate_tree_value_into_stmt (&gsi
, sprime
);
4216 stmt
= gsi_stmt (gsi
);
4219 /* If we removed EH side-effects from the statement, clean
4220 its EH information. */
4221 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4223 bitmap_set_bit (need_eh_cleanup
,
4224 gimple_bb (stmt
)->index
);
4225 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4226 fprintf (dump_file
, " Removed EH side-effects.\n");
4229 /* Likewise for AB side-effects. */
4230 if (can_make_abnormal_goto
4231 && !stmt_can_make_abnormal_goto (stmt
))
4233 bitmap_set_bit (need_ab_cleanup
,
4234 gimple_bb (stmt
)->index
);
4235 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4236 fprintf (dump_file
, " Removed AB side-effects.\n");
4240 /* If the statement is a scalar store, see if the expression
4241 has the same value number as its rhs. If so, the store is
4243 else if (gimple_assign_single_p (stmt
)
4244 && !gimple_has_volatile_ops (stmt
)
4245 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4246 && (TREE_CODE (rhs
) == SSA_NAME
4247 || is_gimple_min_invariant (rhs
)))
4250 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4251 gimple_vuse (stmt
), VN_WALK
, NULL
);
4252 if (TREE_CODE (rhs
) == SSA_NAME
)
4253 rhs
= VN_INFO (rhs
)->valnum
;
4255 && operand_equal_p (val
, rhs
, 0))
4257 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4259 fprintf (dump_file
, "Deleted redundant store ");
4260 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4263 /* Queue stmt for removal. */
4264 VEC_safe_push (gimple
, heap
, el_to_remove
, stmt
);
4267 /* Visit COND_EXPRs and fold the comparison with the
4268 available value-numbers. */
4269 else if (gimple_code (stmt
) == GIMPLE_COND
)
4271 tree op0
= gimple_cond_lhs (stmt
);
4272 tree op1
= gimple_cond_rhs (stmt
);
4275 if (TREE_CODE (op0
) == SSA_NAME
)
4276 op0
= VN_INFO (op0
)->valnum
;
4277 if (TREE_CODE (op1
) == SSA_NAME
)
4278 op1
= VN_INFO (op1
)->valnum
;
4279 result
= fold_binary (gimple_cond_code (stmt
), boolean_type_node
,
4281 if (result
&& TREE_CODE (result
) == INTEGER_CST
)
4283 if (integer_zerop (result
))
4284 gimple_cond_make_false (stmt
);
4286 gimple_cond_make_true (stmt
);
4288 el_todo
= TODO_cleanup_cfg
;
      /* Visit indirect calls and turn them into direct calls if
         possible.  */
      if (is_gimple_call (stmt))
        {
          tree orig_fn = gimple_call_fn (stmt);
          tree fn;
          if (!orig_fn)
            continue;
          if (TREE_CODE (orig_fn) == SSA_NAME)
            fn = VN_INFO (orig_fn)->valnum;
          else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
            fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
          else
            continue;
          if (gimple_call_addr_fndecl (fn) != NULL_TREE
              && useless_type_conversion_p (TREE_TYPE (orig_fn),
                                            TREE_TYPE (fn)))
            {
              bool can_make_abnormal_goto
                = stmt_can_make_abnormal_goto (stmt);
              bool was_noreturn = gimple_call_noreturn_p (stmt);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replacing call target with ");
                  print_generic_expr (dump_file, fn, 0);
                  fprintf (dump_file, " in ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              gimple_call_set_fn (stmt, fn);
              VEC_safe_push (gimple, heap, el_to_update, stmt);

              /* When changing a call into a noreturn call, cfg cleanup
                 is needed to fix up the noreturn call.  */
              if (!was_noreturn && gimple_call_noreturn_p (stmt))
                el_todo |= TODO_cleanup_cfg;

              /* If we removed EH side-effects from the statement, clean
                 its EH information.  */
              if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                {
                  bitmap_set_bit (need_eh_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed EH side-effects.\n");
                }

              /* Likewise for AB side-effects.  */
              if (can_make_abnormal_goto
                  && !stmt_can_make_abnormal_goto (stmt))
                {
                  bitmap_set_bit (need_ab_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed AB side-effects.\n");
                }

              /* Changing an indirect call to a direct call may
                 have exposed different semantics.  This may
                 require an SSA update.  */
              el_todo |= TODO_update_ssa_only_virtuals;
            }
        }
    }
}
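
/* A sketch of the indirect-to-direct call rewriting done above
   (hypothetical input; the names are made up for illustration only):

       void bar (void);

       void
       foo (void)
       {
         void (*fp) (void) = bar;
         fp ();
       }

   The call target fp_1 is an SSA_NAME whose value number is &bar, so
   gimple_call_addr_fndecl finds bar's FUNCTION_DECL and the call is
   rewritten into a direct call to bar; virtual operands and EH or
   abnormal-edge information are fixed up afterwards.  */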
/* Make no longer available leaders no longer available.  */

static void
eliminate_leave_block (dom_walk_data *, basic_block)
{
  tree entry;
  while ((entry = VEC_pop (tree, el_avail_stack)) != NULL_TREE)
    VEC_replace (tree, el_avail,
                 SSA_NAME_VERSION (VN_INFO (entry)->valnum), NULL_TREE);
}
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  struct dom_walk_data walk_data;
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove = NULL;
  el_to_update = NULL;
  el_todo = 0;
  el_avail = NULL;
  el_avail_stack = NULL;

  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = eliminate_bb;
  walk_data.after_dom_children = eliminate_leave_block;
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  VEC_free (tree, heap, el_avail);
  VEC_free (tree, heap, el_avail_stack);
  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (gimple, el_to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
         instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (rhs) == SSA_NAME
          && single_imm_use (lhs, &use_p, &use_stmt)
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
        {
          SET_USE (use_p, rhs);
          update_stmt (use_stmt);
          if (inserted_exprs
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
              && TREE_CODE (rhs) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
        }

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
          || has_zero_uses (lhs))
        {
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (inserted_exprs
              && TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
        }
    }
  VEC_free (gimple, heap, el_to_remove);

  /* We cannot update call statements with virtual operands during
     SSA walk.  This might remove them which in turn makes our
     VN lattice invalid.  */
  FOR_EACH_VEC_ELT (gimple, el_to_update, i, stmt)
    update_stmt (stmt);
  VEC_free (gimple, heap, el_to_update);

  return el_todo;
}
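
/* A small illustration of the copy-propagation case handled when draining
   el_to_remove above (hypothetical GIMPLE, for exposition only):

       tmp_3 = x_1;
       y_2 = tmp_3 + 1;

   If tmp_3 has only the single use shown, the use is rewritten to x_1
   directly via SET_USE/update_stmt, and the now dead copy tmp_3 = x_1 is
   removed once it has zero uses.  */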
/* Perform CFG cleanups made necessary by elimination.  */

static void
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    cleanup_tree_cfg ();
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}
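
/* For example (an illustrative scenario, not a concrete testcase): if
   insertion created

       pretmp_5 = x_2 + 1;

   in a predecessor block, but elimination never used that leader, pretmp_5
   keeps zero uses, its defining statement never gets the NECESSARY flag,
   and the walk above deletes it again.  */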
/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
   true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
   the number of visited blocks.  */

static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the last edge on to the stack.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, src->index);

          if (EDGE_COUNT (src->preds) > 0)
            /* Since the SRC node has been visited for the first
               time, check its predecessors.  */
            stack[sp++] = ei_start (src->preds);
          else
            post_order[post_order_num++] = src->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
            post_order[post_order_num++] = dest->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
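
/* As a small illustration (hypothetical CFG): for the diamond

       ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   the function produces an ordering such as A, B, C, D (or A, C, B, D):
   ignoring back edges, every block appears before all of its successors.
   init_pre stores this order in POSTORDER, where it is used to schedule
   the ANTIC fixpoint iteration so that it converges in few passes.  */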
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap, heap, value_expressions,
                         get_max_value_id () + 1);
  name_to_id = NULL;

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table.create (5110);
  expression_to_id.create (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre (void)
{
  free (postorder);
  VEC_free (bitmap, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  phi_translate_table.dispose ();
  expression_to_id.dispose ();
  VEC_free (unsigned, heap, name_to_id);

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;
      FOR_ALL_BB (bb)
        {
          print_bitmap_set (dump_file, EXP_GEN (bb),
                            "exp_gen", bb->index);
          print_bitmap_set (dump_file, PHI_GEN (bb),
                            "phi_gen", bb->index);
          print_bitmap_set (dump_file, TMP_GEN (bb),
                            "tmp_gen", bb->index);
          print_bitmap_set (dump_file, AVAIL_OUT (bb),
                            "avail_out", bb->index);
        }
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  clear_expression_ids ();
  remove_dead_inserted_code ();
  todo |= TODO_verify_flow;

  scev_finalize ();
  fini_pre ();
  fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  return todo;
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}
struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",                                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_pre,                             /* gate */
  do_pre,                               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PRE,                          /* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,                         /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_rebuild_alias,                   /* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  fini_eliminate ();
  free_scc_vn ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  return todo;
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}
struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",                                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_fre,                             /* gate */
  execute_fre,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_FRE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa    /* todo_flags_finish */
 }
};