/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
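/* Purely as an illustration of the four phases above (this example is
   an editorial addition, not part of the original sources, and the
   function name is hypothetical), PRE transforms source like

     int
     example (int a, int b, int c)
     {
       int x = 0;
       if (c)
         x = a + b;          <- a + b is available on this path only
       return x + (a + b);   <- a + b is partially redundant here
     }

   as if it had been written

     int
     example (int a, int b, int c)
     {
       int x = 0, t;
       if (c)
         t = x = a + b;
       else
         t = a + b;          <- inserted where the value was missing
       return x + t;         <- now fully redundant, reuses t
     }

   The insertion happens on the incoming path where the value was
   unavailable, and elimination then replaces the redundant
   computation with the available value (merged via a PHI node when
   necessary).  */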
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
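/* A minimal sketch of the set representation just described (an
   editorial illustration; the real type and its helpers are defined
   further down in this file).  Membership is tracked both per value
   id and per expression id, so value-based queries and
   expression-based iteration are both cheap:

     struct example_bitmap_set       <- hypothetical name
     {
       bitmap_head values;           <- one bit per value id in the set
       bitmap_head expressions;      <- one bit per expression id in the set
     };

   Inserting an expression sets a bit in both bitmaps; asking whether
   a value is available only consults the values bitmap.  */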
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    CONSTANT,
    NAME,
    NARY,
    REFERENCE
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;
typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static hash_table <pre_expr_d> expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      unsigned old_len = VEC_length (unsigned, name_to_id);
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
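/* Typical use of the iteration macro above (an editorial sketch; it
   mirrors the pattern the functions later in this file follow):

     unsigned int i;
     bitmap_iterator bi;

     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ... use EXPR ...
       }
   */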
/* Mapping from value id to expressions with that value_id.  */
static VEC(bitmap, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;
436 /* This structure is used to keep track of statistics on what
437 optimization PRE was able to perform. */
440 /* The number of RHS computations eliminated by PRE. */
443 /* The number of new expressions/temporaries generated by PRE. */
446 /* The number of inserts found due to partial anticipation */
449 /* The number of new PHI nodes added by PRE. */
452 /* The number of values found constant. */
457 static bool do_partial_partial
;
458 static pre_expr
bitmap_find_leader (bitmap_set_t
, unsigned int);
459 static void bitmap_value_insert_into_set (bitmap_set_t
, pre_expr
);
460 static void bitmap_value_replace_in_set (bitmap_set_t
, pre_expr
);
461 static void bitmap_set_copy (bitmap_set_t
, bitmap_set_t
);
462 static bool bitmap_set_contains_value (bitmap_set_t
, unsigned int);
463 static void bitmap_insert_into_set (bitmap_set_t
, pre_expr
);
464 static void bitmap_insert_into_set_1 (bitmap_set_t
, pre_expr
,
466 static bitmap_set_t
bitmap_set_new (void);
467 static tree
create_expression_by_pieces (basic_block
, pre_expr
, gimple_seq
*,
469 static tree
find_or_generate_expression (basic_block
, tree
, gimple_seq
*);
470 static unsigned int get_expr_value_id (pre_expr
);
472 /* We can add and remove elements and entries to and from sets
473 and hash tables, so we use alloc pools for them. */
475 static alloc_pool bitmap_set_pool
;
476 static bitmap_obstack grand_bitmap_obstack
;
478 /* Set of blocks with statements that have had their EH properties changed. */
479 static bitmap need_eh_cleanup
;
481 /* Set of blocks with statements that have had their AB properties changed. */
482 static bitmap need_ab_cleanup
;
484 /* A three tuple {e, pred, v} used to cache phi translations in the
485 phi_translate_table. */
487 typedef struct expr_pred_trans_d
: typed_free_remove
<expr_pred_trans_d
>
489 /* The expression. */
492 /* The predecessor block along which we translated the expression. */
495 /* The value that resulted from the translation. */
498 /* The hashcode for the expression, pred pair. This is cached for
502 /* hash_table support. */
503 typedef expr_pred_trans_d value_type
;
504 typedef expr_pred_trans_d compare_type
;
505 static inline hashval_t
hash (const value_type
*);
506 static inline int equal (const value_type
*, const compare_type
*);
507 } *expr_pred_trans_t
;
508 typedef const struct expr_pred_trans_d
*const_expr_pred_trans_t
;
511 expr_pred_trans_d::hash (const expr_pred_trans_d
*e
)
517 expr_pred_trans_d::equal (const value_type
*ve1
,
518 const compare_type
*ve2
)
520 basic_block b1
= ve1
->pred
;
521 basic_block b2
= ve2
->pred
;
523 /* If they are not translations for the same basic block, they can't
527 return pre_expr_d::equal (ve1
->e
, ve2
->e
);
530 /* The phi_translate_table caches phi translations for a given
531 expression and predecessor. */
532 static hash_table
<expr_pred_trans_d
> phi_translate_table
;
534 /* Search in the phi translation table for the translation of
535 expression E in basic block PRED.
536 Return the translated value, if found, NULL otherwise. */
538 static inline pre_expr
539 phi_trans_lookup (pre_expr e
, basic_block pred
)
541 expr_pred_trans_t
*slot
;
542 struct expr_pred_trans_d ept
;
546 ept
.hashcode
= iterative_hash_hashval_t (pre_expr_d::hash (e
), pred
->index
);
547 slot
= phi_translate_table
.find_slot_with_hash (&ept
, ept
.hashcode
,
556 /* Add the tuple mapping from {expression E, basic block PRED} to
557 value V, to the phi translation table. */
560 phi_trans_add (pre_expr e
, pre_expr v
, basic_block pred
)
562 expr_pred_trans_t
*slot
;
563 expr_pred_trans_t new_pair
= XNEW (struct expr_pred_trans_d
);
565 new_pair
->pred
= pred
;
567 new_pair
->hashcode
= iterative_hash_hashval_t (pre_expr_d::hash (e
),
570 slot
= phi_translate_table
.find_slot_with_hash (new_pair
,
571 new_pair
->hashcode
, INSERT
);
577 /* Add expression E to the expression set of value id V. */
580 add_to_value (unsigned int v
, pre_expr e
)
584 gcc_checking_assert (get_expr_value_id (e
) == v
);
586 if (v
>= VEC_length (bitmap
, value_expressions
))
588 VEC_safe_grow_cleared (bitmap
, heap
, value_expressions
, v
+ 1);
591 set
= VEC_index (bitmap
, value_expressions
, v
);
594 set
= BITMAP_ALLOC (&grand_bitmap_obstack
);
595 VEC_replace (bitmap
, value_expressions
, v
, set
);
598 bitmap_set_bit (set
, get_or_alloc_expression_id (e
));
601 /* Create a new bitmap set and return it. */
604 bitmap_set_new (void)
606 bitmap_set_t ret
= (bitmap_set_t
) pool_alloc (bitmap_set_pool
);
607 bitmap_initialize (&ret
->expressions
, &grand_bitmap_obstack
);
608 bitmap_initialize (&ret
->values
, &grand_bitmap_obstack
);
612 /* Return the value id for a PRE expression EXPR. */
615 get_expr_value_id (pre_expr expr
)
622 id
= get_constant_value_id (PRE_EXPR_CONSTANT (expr
));
625 id
= get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr
));
626 add_to_value (id
, expr
);
631 return VN_INFO (PRE_EXPR_NAME (expr
))->value_id
;
633 return PRE_EXPR_NARY (expr
)->value_id
;
635 return PRE_EXPR_REFERENCE (expr
)->value_id
;
641 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
644 sccvn_valnum_from_value_id (unsigned int val
)
648 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
649 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
651 pre_expr vexpr
= expression_for_id (i
);
652 if (vexpr
->kind
== NAME
)
653 return VN_INFO (PRE_EXPR_NAME (vexpr
))->valnum
;
654 else if (vexpr
->kind
== CONSTANT
)
655 return PRE_EXPR_CONSTANT (vexpr
);
660 /* Remove an expression EXPR from a bitmapped set. */
663 bitmap_remove_from_set (bitmap_set_t set
, pre_expr expr
)
665 unsigned int val
= get_expr_value_id (expr
);
666 if (!value_id_constant_p (val
))
668 bitmap_clear_bit (&set
->values
, val
);
669 bitmap_clear_bit (&set
->expressions
, get_expression_id (expr
));
674 bitmap_insert_into_set_1 (bitmap_set_t set
, pre_expr expr
,
675 unsigned int val
, bool allow_constants
)
677 if (allow_constants
|| !value_id_constant_p (val
))
679 /* We specifically expect this and only this function to be able to
680 insert constants into a set. */
681 bitmap_set_bit (&set
->values
, val
);
682 bitmap_set_bit (&set
->expressions
, get_or_alloc_expression_id (expr
));
686 /* Insert an expression EXPR into a bitmapped set. */
689 bitmap_insert_into_set (bitmap_set_t set
, pre_expr expr
)
691 bitmap_insert_into_set_1 (set
, expr
, get_expr_value_id (expr
), false);
694 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
697 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
699 bitmap_copy (&dest
->expressions
, &orig
->expressions
);
700 bitmap_copy (&dest
->values
, &orig
->values
);
704 /* Free memory used up by SET. */
706 bitmap_set_free (bitmap_set_t set
)
708 bitmap_clear (&set
->expressions
);
709 bitmap_clear (&set
->values
);
/* Generate a topologically ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));
725 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
737 bitmap exprset
= VEC_index (bitmap
, value_expressions
, i
);
738 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
740 if (bitmap_bit_p (&set
->expressions
, j
))
741 VEC_safe_push (pre_expr
, heap
, result
, expression_for_id (j
));
748 /* Perform bitmapped set operation DEST &= ORIG. */
751 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
759 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
761 bitmap_and_into (&dest
->values
, &orig
->values
);
762 bitmap_copy (&temp
, &dest
->expressions
);
763 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
765 pre_expr expr
= expression_for_id (i
);
766 unsigned int value_id
= get_expr_value_id (expr
);
767 if (!bitmap_bit_p (&dest
->values
, value_id
))
768 bitmap_clear_bit (&dest
->expressions
, i
);
770 bitmap_clear (&temp
);
774 /* Subtract all values and expressions contained in ORIG from DEST. */
777 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
779 bitmap_set_t result
= bitmap_set_new ();
783 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
786 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
788 pre_expr expr
= expression_for_id (i
);
789 unsigned int value_id
= get_expr_value_id (expr
);
790 bitmap_set_bit (&result
->values
, value_id
);
796 /* Subtract all the values in bitmap set B from bitmap set A. */
799 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
805 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
807 bitmap_copy (&temp
, &a
->expressions
);
808 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
810 pre_expr expr
= expression_for_id (i
);
811 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
812 bitmap_remove_from_set (a
, expr
);
814 bitmap_clear (&temp
);
818 /* Return true if bitmapped set SET contains the value VALUE_ID. */
821 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
823 if (value_id_constant_p (value_id
))
826 if (!set
|| bitmap_empty_p (&set
->expressions
))
829 return bitmap_bit_p (&set
->values
, value_id
);
833 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
835 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
838 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
841 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
848 if (value_id_constant_p (lookfor
))
851 if (!bitmap_set_contains_value (set
, lookfor
))
  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
863 exprset
= VEC_index (bitmap
, value_expressions
, lookfor
);
864 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
866 if (bitmap_clear_bit (&set
->expressions
, i
))
868 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
874 /* Return true if two bitmap sets are equal. */
877 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
879 return bitmap_equal_p (&a
->values
, &b
->values
);
882 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
883 and add it otherwise. */
886 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
888 unsigned int val
= get_expr_value_id (expr
);
890 if (bitmap_set_contains_value (set
, val
))
891 bitmap_set_replace_value (set
, val
, expr
);
893 bitmap_insert_into_set (set
, expr
);
896 /* Insert EXPR into SET if EXPR's value is not already present in
900 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
902 unsigned int val
= get_expr_value_id (expr
);
904 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
906 /* Constant values are always considered to be part of the set. */
907 if (value_id_constant_p (val
))
910 /* If the value membership changed, add the expression. */
911 if (bitmap_set_bit (&set
->values
, val
))
912 bitmap_set_bit (&set
->expressions
, expr
->id
);
915 /* Print out EXPR to outfile. */
918 print_pre_expr (FILE *outfile
, const pre_expr expr
)
923 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
926 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
931 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
932 fprintf (outfile
, "{%s,", tree_code_name
[nary
->opcode
]);
933 for (i
= 0; i
< nary
->length
; i
++)
935 print_generic_expr (outfile
, nary
->op
[i
], 0);
936 if (i
!= (unsigned) nary
->length
- 1)
937 fprintf (outfile
, ",");
939 fprintf (outfile
, "}");
945 vn_reference_op_t vro
;
947 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
948 fprintf (outfile
, "{");
950 VEC_iterate (vn_reference_op_s
, ref
->operands
, i
, vro
);
953 bool closebrace
= false;
954 if (vro
->opcode
!= SSA_NAME
955 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
957 fprintf (outfile
, "%s", tree_code_name
[vro
->opcode
]);
960 fprintf (outfile
, "<");
966 print_generic_expr (outfile
, vro
->op0
, 0);
969 fprintf (outfile
, ",");
970 print_generic_expr (outfile
, vro
->op1
, 0);
974 fprintf (outfile
, ",");
975 print_generic_expr (outfile
, vro
->op2
, 0);
979 fprintf (outfile
, ">");
980 if (i
!= VEC_length (vn_reference_op_s
, ref
->operands
) - 1)
981 fprintf (outfile
, ",");
983 fprintf (outfile
, "}");
986 fprintf (outfile
, "@");
987 print_generic_expr (outfile
, ref
->vuse
, 0);
993 void debug_pre_expr (pre_expr
);
995 /* Like print_pre_expr but always prints to stderr. */
997 debug_pre_expr (pre_expr e
)
999 print_pre_expr (stderr
, e
);
1000 fprintf (stderr
, "\n");
1003 /* Print out SET to OUTFILE. */
1006 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1007 const char *setname
, int blockindex
)
1009 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1016 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1018 const pre_expr expr
= expression_for_id (i
);
1021 fprintf (outfile
, ", ");
1023 print_pre_expr (outfile
, expr
);
1025 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1028 fprintf (outfile
, " }\n");
1031 void debug_bitmap_set (bitmap_set_t
);
1034 debug_bitmap_set (bitmap_set_t set
)
1036 print_bitmap_set (stderr
, set
, "debug", 0);
1039 void debug_bitmap_sets_for (basic_block
);
1042 debug_bitmap_sets_for (basic_block bb
)
1044 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1045 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1046 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1047 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1048 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1049 if (do_partial_partial
)
1050 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1051 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1054 /* Print out the expressions that have VAL to OUTFILE. */
1057 print_value_expressions (FILE *outfile
, unsigned int val
)
1059 bitmap set
= VEC_index (bitmap
, value_expressions
, val
);
1064 sprintf (s
, "%04d", val
);
1065 x
.expressions
= *set
;
1066 print_bitmap_set (outfile
, &x
, s
, 0);
1072 debug_value_expressions (unsigned int val
)
1074 print_value_expressions (stderr
, val
);
1077 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1081 get_or_alloc_expr_for_constant (tree constant
)
1083 unsigned int result_id
;
1084 unsigned int value_id
;
1085 struct pre_expr_d expr
;
1088 expr
.kind
= CONSTANT
;
1089 PRE_EXPR_CONSTANT (&expr
) = constant
;
1090 result_id
= lookup_expression_id (&expr
);
1092 return expression_for_id (result_id
);
1094 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1095 newexpr
->kind
= CONSTANT
;
1096 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1097 alloc_expression_id (newexpr
);
1098 value_id
= get_or_alloc_constant_value_id (constant
);
1099 add_to_value (value_id
, newexpr
);
1103 /* Given a value id V, find the actual tree representing the constant
1104 value if there is one, and return it. Return NULL if we can't find
1108 get_constant_for_value_id (unsigned int v
)
1110 if (value_id_constant_p (v
))
1114 bitmap exprset
= VEC_index (bitmap
, value_expressions
, v
);
1116 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1118 pre_expr expr
= expression_for_id (i
);
1119 if (expr
->kind
== CONSTANT
)
1120 return PRE_EXPR_CONSTANT (expr
);
1126 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1127 Currently only supports constants and SSA_NAMES. */
1129 get_or_alloc_expr_for (tree t
)
1131 if (TREE_CODE (t
) == SSA_NAME
)
1132 return get_or_alloc_expr_for_name (t
);
1133 else if (is_gimple_min_invariant (t
))
1134 return get_or_alloc_expr_for_constant (t
);
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of them
         have VOPs, they get handled by the nary ops struct.  */
1140 vn_nary_op_t result
;
1141 unsigned int result_id
;
1142 vn_nary_op_lookup (t
, &result
);
1145 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1147 PRE_EXPR_NARY (e
) = result
;
1148 result_id
= lookup_expression_id (e
);
1151 pool_free (pre_expr_pool
, e
);
1152 e
= expression_for_id (result_id
);
1155 alloc_expression_id (e
);
1162 /* Return the folded version of T if T, when folded, is a gimple
1163 min_invariant. Otherwise, return T. */
1166 fully_constant_expression (pre_expr e
)
1174 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1175 switch (TREE_CODE_CLASS (nary
->opcode
))
1178 case tcc_comparison
:
1180 /* We have to go from trees to pre exprs to value ids to
1182 tree naryop0
= nary
->op
[0];
1183 tree naryop1
= nary
->op
[1];
1185 if (!is_gimple_min_invariant (naryop0
))
1187 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1188 unsigned int vrep0
= get_expr_value_id (rep0
);
1189 tree const0
= get_constant_for_value_id (vrep0
);
1191 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1193 if (!is_gimple_min_invariant (naryop1
))
1195 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1196 unsigned int vrep1
= get_expr_value_id (rep1
);
1197 tree const1
= get_constant_for_value_id (vrep1
);
1199 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1201 result
= fold_binary (nary
->opcode
, nary
->type
,
1203 if (result
&& is_gimple_min_invariant (result
))
1204 return get_or_alloc_expr_for_constant (result
);
1205 /* We might have simplified the expression to a
1206 SSA_NAME for example from x_1 * 1. But we cannot
1207 insert a PHI for x_1 unconditionally as x_1 might
1208 not be available readily. */
1212 if (nary
->opcode
!= REALPART_EXPR
1213 && nary
->opcode
!= IMAGPART_EXPR
1214 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1219 /* We have to go from trees to pre exprs to value ids to
1221 tree naryop0
= nary
->op
[0];
1222 tree const0
, result
;
1223 if (is_gimple_min_invariant (naryop0
))
1227 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1228 unsigned int vrep0
= get_expr_value_id (rep0
);
1229 const0
= get_constant_for_value_id (vrep0
);
1234 tree type1
= TREE_TYPE (nary
->op
[0]);
1235 const0
= fold_convert (type1
, const0
);
1236 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1238 if (result
&& is_gimple_min_invariant (result
))
1239 return get_or_alloc_expr_for_constant (result
);
1248 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1250 if ((folded
= fully_constant_vn_reference_p (ref
)))
1251 return get_or_alloc_expr_for_constant (folded
);
1260 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1261 it has the value it would have in BLOCK. Set *SAME_VALID to true
1262 in case the new vuse doesn't change the value id of the OPERANDS. */
1265 translate_vuse_through_block (VEC (vn_reference_op_s
, heap
) *operands
,
1266 alias_set_type set
, tree type
, tree vuse
,
1267 basic_block phiblock
,
1268 basic_block block
, bool *same_valid
)
1270 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1277 if (gimple_bb (phi
) != phiblock
)
1280 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1282 /* Use the alias-oracle to find either the PHI node in this block,
1283 the first VUSE used in this block that is equivalent to vuse or
1284 the first VUSE which definition in this block kills the value. */
1285 if (gimple_code (phi
) == GIMPLE_PHI
)
1286 e
= find_edge (block
, phiblock
);
1287 else if (use_oracle
)
1288 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1290 vuse
= gimple_vuse (phi
);
1291 phi
= SSA_NAME_DEF_STMT (vuse
);
1292 if (gimple_bb (phi
) != phiblock
)
1294 if (gimple_code (phi
) == GIMPLE_PHI
)
1296 e
= find_edge (block
, phiblock
);
1307 bitmap visited
= NULL
;
1309 /* Try to find a vuse that dominates this phi node by skipping
1310 non-clobbering statements. */
1311 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false);
1313 BITMAP_FREE (visited
);
1319 /* If we didn't find any, the value ID can't stay the same,
1320 but return the translated vuse. */
1321 *same_valid
= false;
1322 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1324 /* ??? We would like to return vuse here as this is the canonical
1325 upmost vdef that this reference is associated with. But during
1326 insertion of the references into the hash tables we only ever
1327 directly insert with their direct gimple_vuse, hence returning
1328 something else would make us not find the other expression. */
1329 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1335 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1336 SET2. This is used to avoid making a set consisting of the union
1337 of PA_IN and ANTIC_IN during insert. */
1339 static inline pre_expr
1340 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1344 result
= bitmap_find_leader (set1
, val
);
1345 if (!result
&& set2
)
1346 result
= bitmap_find_leader (set2
, val
);
1350 /* Get the tree type for our PRE expression e. */
1353 get_expr_type (const pre_expr e
)
1358 return TREE_TYPE (PRE_EXPR_NAME (e
));
1360 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1362 return PRE_EXPR_REFERENCE (e
)->type
;
1364 return PRE_EXPR_NARY (e
)->type
;
1369 /* Get a representative SSA_NAME for a given expression.
1370 Since all of our sub-expressions are treated as values, we require
1371 them to be SSA_NAME's for simplicity.
1372 Prior versions of GVNPRE used to use "value handles" here, so that
1373 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1374 either case, the operands are really values (IE we do not expect
1375 them to be usable without finding leaders). */
1378 get_representative_for (const pre_expr e
)
1381 unsigned int value_id
= get_expr_value_id (e
);
1386 return PRE_EXPR_NAME (e
);
1388 return PRE_EXPR_CONSTANT (e
);
1392 /* Go through all of the expressions representing this value
1393 and pick out an SSA_NAME. */
1396 bitmap exprs
= VEC_index (bitmap
, value_expressions
, value_id
);
1397 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1399 pre_expr rep
= expression_for_id (i
);
1400 if (rep
->kind
== NAME
)
1401 return PRE_EXPR_NAME (rep
);
1406 /* If we reached here we couldn't find an SSA_NAME. This can
1407 happen when we've discovered a value that has never appeared in
1408 the program as set to an SSA_NAME, most likely as the result of
1413 "Could not find SSA_NAME representative for expression:");
1414 print_pre_expr (dump_file
, e
);
1415 fprintf (dump_file
, "\n");
1418 /* Build and insert the assignment of the end result to the temporary
1419 that we will return. */
1420 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1421 VN_INFO_GET (name
)->value_id
= value_id
;
1422 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
1423 if (VN_INFO (name
)->valnum
== NULL_TREE
)
1424 VN_INFO (name
)->valnum
= name
;
1425 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1428 fprintf (dump_file
, "Created SSA_NAME representative ");
1429 print_generic_expr (dump_file
, name
, 0);
1430 fprintf (dump_file
, " for expression:");
1431 print_pre_expr (dump_file
, e
);
1432 fprintf (dump_file
, "\n");
1441 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1442 basic_block pred
, basic_block phiblock
);
1444 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1445 the phis in PRED. Return NULL if we can't find a leader for each part
1446 of the translated expression. */
1449 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1450 basic_block pred
, basic_block phiblock
)
1457 bool changed
= false;
1458 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1459 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1460 sizeof_vn_nary_op (nary
->length
));
1461 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1463 for (i
= 0; i
< newnary
->length
; i
++)
1465 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1469 pre_expr leader
, result
;
1470 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1471 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1472 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1473 if (result
&& result
!= leader
)
1475 tree name
= get_representative_for (result
);
1478 newnary
->op
[i
] = name
;
1483 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1489 unsigned int new_val_id
;
1491 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1496 if (result
&& is_gimple_min_invariant (result
))
1497 return get_or_alloc_expr_for_constant (result
);
1499 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1504 PRE_EXPR_NARY (expr
) = nary
;
1505 constant
= fully_constant_expression (expr
);
1506 if (constant
!= expr
)
1509 new_val_id
= nary
->value_id
;
1510 get_or_alloc_expression_id (expr
);
1514 new_val_id
= get_next_value_id ();
1515 VEC_safe_grow_cleared (bitmap
, heap
,
1517 get_max_value_id() + 1);
1518 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1522 result
, new_val_id
);
1523 PRE_EXPR_NARY (expr
) = nary
;
1524 constant
= fully_constant_expression (expr
);
1525 if (constant
!= expr
)
1527 get_or_alloc_expression_id (expr
);
1529 add_to_value (new_val_id
, expr
);
1537 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1538 VEC (vn_reference_op_s
, heap
) *operands
= ref
->operands
;
1539 tree vuse
= ref
->vuse
;
1540 tree newvuse
= vuse
;
1541 VEC (vn_reference_op_s
, heap
) *newoperands
= NULL
;
1542 bool changed
= false, same_valid
= true;
1543 unsigned int i
, j
, n
;
1544 vn_reference_op_t operand
;
1545 vn_reference_t newref
;
1548 VEC_iterate (vn_reference_op_s
, operands
, i
, operand
); i
++, j
++)
1553 tree type
= operand
->type
;
1554 vn_reference_op_s newop
= *operand
;
1555 op
[0] = operand
->op0
;
1556 op
[1] = operand
->op1
;
1557 op
[2] = operand
->op2
;
1558 for (n
= 0; n
< 3; ++n
)
1560 unsigned int op_val_id
;
1563 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1565 /* We can't possibly insert these. */
1567 && !is_gimple_min_invariant (op
[n
]))
1571 op_val_id
= VN_INFO (op
[n
])->value_id
;
1572 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1575 /* Make sure we do not recursively translate ourselves
1576 like for translating a[n_1] with the leader for
1577 n_1 being a[n_1]. */
1578 if (get_expression_id (leader
) != get_expression_id (expr
))
1580 opresult
= phi_translate (leader
, set1
, set2
,
1584 if (opresult
!= leader
)
1586 tree name
= get_representative_for (opresult
);
1589 changed
|= name
!= op
[n
];
1597 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1601 newoperands
= VEC_copy (vn_reference_op_s
, heap
, operands
);
1602 /* We may have changed from an SSA_NAME to a constant */
1603 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1604 newop
.opcode
= TREE_CODE (op
[0]);
1609 /* If it transforms a non-constant ARRAY_REF into a constant
1610 one, adjust the constant offset. */
1611 if (newop
.opcode
== ARRAY_REF
1613 && TREE_CODE (op
[0]) == INTEGER_CST
1614 && TREE_CODE (op
[1]) == INTEGER_CST
1615 && TREE_CODE (op
[2]) == INTEGER_CST
)
1617 double_int off
= tree_to_double_int (op
[0]);
1618 off
+= -tree_to_double_int (op
[1]);
1619 off
*= tree_to_double_int (op
[2]);
1620 if (off
.fits_shwi ())
1621 newop
.off
= off
.low
;
1623 VEC_replace (vn_reference_op_s
, newoperands
, j
, newop
);
1624 /* If it transforms from an SSA_NAME to an address, fold with
1625 a preceding indirect reference. */
1626 if (j
> 0 && op
[0] && TREE_CODE (op
[0]) == ADDR_EXPR
1627 && VEC_index (vn_reference_op_s
,
1628 newoperands
, j
- 1).opcode
== MEM_REF
)
1629 vn_reference_fold_indirect (&newoperands
, &j
);
1631 if (i
!= VEC_length (vn_reference_op_s
, operands
))
1634 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1640 newvuse
= translate_vuse_through_block (newoperands
,
1641 ref
->set
, ref
->type
,
1642 vuse
, phiblock
, pred
,
1644 if (newvuse
== NULL_TREE
)
1646 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1651 if (changed
|| newvuse
!= vuse
)
1653 unsigned int new_val_id
;
1656 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1661 VEC_free (vn_reference_op_s
, heap
, newoperands
);
      /* We can always insert constants, so if we have a partially
         redundant constant load of another type try to translate it
         to a constant of the appropriate type.  */
1666 if (result
&& is_gimple_min_invariant (result
))
1669 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1671 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1672 if (tem
&& !is_gimple_min_invariant (tem
))
1676 return get_or_alloc_expr_for_constant (tem
);
1679 /* If we'd have to convert things we would need to validate
1680 if we can insert the translated expression. So fail
1681 here for now - we cannot insert an alias with a different
1682 type in the VN tables either, as that would assert. */
1684 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1686 else if (!result
&& newref
1687 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1689 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1693 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1694 expr
->kind
= REFERENCE
;
1699 PRE_EXPR_REFERENCE (expr
) = newref
;
1700 constant
= fully_constant_expression (expr
);
1701 if (constant
!= expr
)
1704 new_val_id
= newref
->value_id
;
1705 get_or_alloc_expression_id (expr
);
1709 if (changed
|| !same_valid
)
1711 new_val_id
= get_next_value_id ();
1712 VEC_safe_grow_cleared (bitmap
, heap
,
1714 get_max_value_id() + 1);
1717 new_val_id
= ref
->value_id
;
1718 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1721 result
, new_val_id
);
1723 PRE_EXPR_REFERENCE (expr
) = newref
;
1724 constant
= fully_constant_expression (expr
);
1725 if (constant
!= expr
)
1727 get_or_alloc_expression_id (expr
);
1729 add_to_value (new_val_id
, expr
);
1731 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1738 tree name
= PRE_EXPR_NAME (expr
);
1739 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
1740 /* If the SSA name is defined by a PHI node in this block,
1742 if (gimple_code (def_stmt
) == GIMPLE_PHI
1743 && gimple_bb (def_stmt
) == phiblock
)
1745 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1746 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1748 /* Handle constant. */
1749 if (is_gimple_min_invariant (def
))
1750 return get_or_alloc_expr_for_constant (def
);
1752 return get_or_alloc_expr_for_name (def
);
1754 /* Otherwise return it unchanged - it will get cleaned if its
1755 value is not available in PREDs AVAIL_OUT set of expressions. */
1764 /* Wrapper around phi_translate_1 providing caching functionality. */
1767 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1768 basic_block pred
, basic_block phiblock
)
1775 /* Constants contain no values that need translation. */
1776 if (expr
->kind
== CONSTANT
)
1779 if (value_id_constant_p (get_expr_value_id (expr
)))
1782 if (expr
->kind
!= NAME
)
1784 phitrans
= phi_trans_lookup (expr
, pred
);
1790 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1792 /* Don't add empty translations to the cache. Neither add
1793 translations of NAMEs as those are cheap to translate. */
1795 && expr
->kind
!= NAME
)
1796 phi_trans_add (expr
, phitrans
, pred
);
1802 /* For each expression in SET, translate the values through phi nodes
1803 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1804 expressions in DEST. */
1807 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1808 basic_block phiblock
)
1810 VEC (pre_expr
, heap
) *exprs
;
1814 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1816 bitmap_set_copy (dest
, set
);
1820 exprs
= sorted_array_from_bitmap_set (set
);
1821 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
1823 pre_expr translated
;
1824 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1828 /* We might end up with multiple expressions from SET being
1829 translated to the same value. In this case we do not want
1830 to retain the NARY or REFERENCE expression but prefer a NAME
1831 which would be the leader. */
1832 if (translated
->kind
== NAME
)
1833 bitmap_value_replace_in_set (dest
, translated
);
1835 bitmap_value_insert_into_set (dest
, translated
);
1837 VEC_free (pre_expr
, heap
, exprs
);
1840 /* Find the leader for a value (i.e., the name representing that
1841 value) in a given set, and return it. If STMT is non-NULL it
1842 makes sure the defining statement for the leader dominates it.
1843 Return NULL if no leader is found. */
1846 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1848 if (value_id_constant_p (val
))
1852 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
1854 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1856 pre_expr expr
= expression_for_id (i
);
1857 if (expr
->kind
== CONSTANT
)
1861 if (bitmap_set_contains_value (set
, val
))
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
1876 bitmap exprset
= VEC_index (bitmap
, value_expressions
, val
);
1878 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1879 return expression_for_id (i
);
1884 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1885 BLOCK by seeing if it is not killed in the block. Note that we are
1886 only determining whether there is a store that kills it. Because
1887 of the order in which clean iterates over values, we are guaranteed
1888 that altered operands will have caused us to be eliminated from the
1889 ANTIC_IN set already. */
1892 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1894 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1895 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1897 gimple_stmt_iterator gsi
;
1898 unsigned id
= get_expression_id (expr
);
1905 /* Lookup a previously calculated result. */
1906 if (EXPR_DIES (block
)
1907 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1908 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting the statement walk
     from the top of the basic block, a statement uses VUSE, there can
     be no kill in between that use and the original statement that
     loaded {e, VUSE}, so we can stop walking.  */
1915 ref
.base
= NULL_TREE
;
1916 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1918 tree def_vuse
, def_vdef
;
1919 def
= gsi_stmt (gsi
);
1920 def_vuse
= gimple_vuse (def
);
1921 def_vdef
= gimple_vdef (def
);
1923 /* Not a memory statement. */
1927 /* Not a may-def. */
1930 /* A load with the same VUSE, we're done. */
1931 if (def_vuse
== vuse
)
1937 /* Init ref only if we really need it. */
1938 if (ref
.base
== NULL_TREE
1939 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1945 /* If the statement may clobber expr, it dies. */
1946 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1953 /* Remember the result. */
1954 if (!EXPR_DIES (block
))
1955 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1956 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1958 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1964 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1965 contains its value-id. */
1968 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1970 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1972 unsigned int value_id
= VN_INFO (op
)->value_id
;
1973 if (!(bitmap_set_contains_value (set1
, value_id
)
1974 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1980 /* Determine if the expression EXPR is valid in SET1 U SET2.
1981 ONLY SET2 CAN BE NULL.
1982 This means that we have a leader for each part of the expression
1983 (if it consists of values), or the expression is an SSA_NAME.
1984 For loads/calls, we also see if the vuse is killed in this block. */
1987 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
,
1993 return bitmap_set_contains_expr (AVAIL_OUT (block
), expr
);
1997 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1998 for (i
= 0; i
< nary
->length
; i
++)
1999 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
2006 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2007 vn_reference_op_t vro
;
2010 FOR_EACH_VEC_ELT (vn_reference_op_s
, ref
->operands
, i
, vro
)
2012 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
2013 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
2014 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
2024 /* Clean the set of expressions that are no longer valid in SET1 or
2025 SET2. This means expressions that are made up of values we have no
2026 leaders for in SET1 or SET2. This version is used for partial
2027 anticipation, which means it is not valid in either ANTIC_IN or
2031 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
2033 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set1
);
2037 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
2039 if (!valid_in_sets (set1
, set2
, expr
, block
))
2040 bitmap_remove_from_set (set1
, expr
);
2042 VEC_free (pre_expr
, heap
, exprs
);
2045 /* Clean the set of expressions that are no longer valid in SET. This
2046 means expressions that are made up of values we have no leaders for
2050 clean (bitmap_set_t set
, basic_block block
)
2052 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set
);
2056 FOR_EACH_VEC_ELT (pre_expr
, exprs
, i
, expr
)
2058 if (!valid_in_sets (set
, NULL
, expr
, block
))
2059 bitmap_remove_from_set (set
, expr
);
2061 VEC_free (pre_expr
, heap
, exprs
);
2064 /* Clean the set of expressions that are no longer valid in SET because
2065 they are clobbered in BLOCK or because they trap and may not be executed. */
2068 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2073 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2075 pre_expr expr
= expression_for_id (i
);
2076 if (expr
->kind
== REFERENCE
)
2078 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2081 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2082 if (!gimple_nop_p (def_stmt
)
2083 && ((gimple_bb (def_stmt
) != block
2084 && !dominated_by_p (CDI_DOMINATORS
,
2085 block
, gimple_bb (def_stmt
)))
2086 || (gimple_bb (def_stmt
) == block
2087 && value_dies_in_block_x (expr
, block
))))
2088 bitmap_remove_from_set (set
, expr
);
2091 else if (expr
->kind
== NARY
)
2093 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2094 /* If the NARY may trap make sure the block does not contain
2095 a possible exit point.
2096 ??? This is overly conservative if we translate AVAIL_OUT
2097 as the available expression might be after the exit point. */
2098 if (BB_MAY_NOTRETURN (block
)
2099 && vn_nary_may_trap (nary
))
2100 bitmap_remove_from_set (set
, expr
);
2105 static sbitmap has_abnormal_preds
;
2107 /* List of blocks that may have changed during ANTIC computation and
2108 thus need to be iterated over. */
2110 static sbitmap changed_blocks
;
2112 /* Decide whether to defer a block for a later iteration, or PHI
2113 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2114 should defer the block, and true if we processed it. */
2117 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
2118 basic_block block
, basic_block phiblock
)
2120 if (!BB_VISITED (phiblock
))
2122 bitmap_set_bit (changed_blocks
, block
->index
);
2123 BB_VISITED (block
) = 0;
2124 BB_DEFERRED (block
) = 1;
2128 phi_translate_set (dest
, source
, block
, phiblock
);
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
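/* An editorial illustration of the equations above (not part of the
   original sources): for a block B with two successors S1 and S2,
   where the expression a_1 + b_2 is anticipatable in both successors
   and B itself neither computes nor kills it,

     ANTIC_OUT[B] = ANTIC_IN[S1] intersect ANTIC_IN[S2]
                  = { ..., a_1 + b_2, ... }
     ANTIC_IN[B]  = clean (ANTIC_OUT[B] U EXP_GEN[B] - TMP_GEN[B])
                  = { ..., a_1 + b_2, ... }

   so inserting a_1 + b_2 at the end of B would be safe.  If only S1
   anticipated it, the intersection would drop it, and the expression
   could not be hoisted above the branch.  */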
2143 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2145 bool changed
= false;
2146 bitmap_set_t S
, old
, ANTIC_OUT
;
2152 old
= ANTIC_OUT
= S
= NULL
;
2153 BB_VISITED (block
) = 1;
2155 /* If any edges from predecessors are abnormal, antic_in is empty,
2157 if (block_has_abnormal_pred_edge
)
2158 goto maybe_dump_sets
;
2160 old
= ANTIC_IN (block
);
2161 ANTIC_OUT
= bitmap_set_new ();
2163 /* If the block has no successors, ANTIC_OUT is empty. */
2164 if (EDGE_COUNT (block
->succs
) == 0)
2166 /* If we have one successor, we could have some phi nodes to
2167 translate through. */
2168 else if (single_succ_p (block
))
2170 basic_block succ_bb
= single_succ (block
);
      /* We trade iterations of the dataflow equations for having to
         phi translate the maximal set, which is incredibly slow
         (since the maximal set often has 300+ members, even when you
         have a small number of blocks).
         Basically, we defer the computation of ANTIC for this block
         until we have processed its successor, which will inevitably
         have a *much* smaller set of values to phi translate once
         clean has been run on it.
         The cost of doing this is that we technically perform more
         iterations, however, they are lower cost iterations.

         Timings for PRE on tramp3d-v4:
         without maximal set fix: 11 seconds
         with maximal set fix/without deferring: 26 seconds
         with maximal set fix/with deferring: 11 seconds
       */
2189 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
2193 goto maybe_dump_sets
;
2196 /* If we have multiple successors, we take the intersection of all of
2197 them. Note that in the case of loop exit phi nodes, we may have
2198 phis to translate through. */
2201 VEC(basic_block
, heap
) * worklist
;
2203 basic_block bprime
, first
= NULL
;
2205 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
2206 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2209 && BB_VISITED (e
->dest
))
2211 else if (BB_VISITED (e
->dest
))
2212 VEC_quick_push (basic_block
, worklist
, e
->dest
);
2215 /* Of multiple successors we have to have visited one already. */
2218 bitmap_set_bit (changed_blocks
, block
->index
);
2219 BB_VISITED (block
) = 0;
2220 BB_DEFERRED (block
) = 1;
2222 VEC_free (basic_block
, heap
, worklist
);
2223 goto maybe_dump_sets
;
2226 if (!gimple_seq_empty_p (phi_nodes (first
)))
2227 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2229 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
2231 FOR_EACH_VEC_ELT (basic_block
, worklist
, i
, bprime
)
2233 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2235 bitmap_set_t tmp
= bitmap_set_new ();
2236 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2237 bitmap_set_and (ANTIC_OUT
, tmp
);
2238 bitmap_set_free (tmp
);
2241 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2243 VEC_free (basic_block
, heap
, worklist
);
2246 /* Prune expressions that are clobbered in block and thus become
2247 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2248 prune_clobbered_mems (ANTIC_OUT
, block
);
2250 /* Generate ANTIC_OUT - TMP_GEN. */
2251 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2253 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2254 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2257 /* Then union in the ANTIC_OUT - TMP_GEN values,
2258 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2259 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2260 bitmap_value_insert_into_set (ANTIC_IN (block
),
2261 expression_for_id (bii
));
2263 clean (ANTIC_IN (block
), block
);
2265 if (!bitmap_set_equal (old
, ANTIC_IN (block
)))
2268 bitmap_set_bit (changed_blocks
, block
->index
);
2269 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2270 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2273 bitmap_clear_bit (changed_blocks
, block
->index
);
2276 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2278 if (!BB_DEFERRED (block
) || BB_VISITED (block
))
2281 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2283 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2287 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2292 "Block %d was deferred for a future iteration.\n",
2297 bitmap_set_free (old
);
2299 bitmap_set_free (S
);
2301 bitmap_set_free (ANTIC_OUT
);
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
                                  - ANTIC_IN[BLOCK])
*/
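/* In contrast to ANTIC, PA_OUT is a value-wise union over the successors,
   so an expression is partially anticipatable when at least one (rather
   than every) path out of the block would compute it.  That is also why
   DFS back edges must be skipped below: unioning a translated recurrence
   such as i + 1 back around a loop edge would keep manufacturing new
   values each iteration and the fixpoint would never be reached.  */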
static bool
compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
        phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) *worklist;
      size_t i;
      basic_block bprime;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (e->flags & EDGE_DFS_BACK)
            continue;
          VEC_quick_push (basic_block, worklist, e->dest);
        }
      if (VEC_length (basic_block, worklist) > 0)
        {
          FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
            {
              unsigned int i;
              bitmap_iterator bi;

              FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
                bitmap_value_insert_into_set (PA_OUT,
                                              expression_for_id (i));
              if (!gimple_seq_empty_p (phi_nodes (bprime)))
                {
                  bitmap_set_t pa_in = bitmap_set_new ();
                  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
                  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
                    bitmap_value_insert_into_set (PA_OUT,
                                                  expression_for_id (i));
                  bitmap_set_free (pa_in);
                }
              else
                FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
                  bitmap_value_insert_into_set (PA_OUT,
                                                expression_for_id (i));
            }
        }
      VEC_free (basic_block, heap, worklist);
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
        bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
        print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
        {
          e->flags &= ~EDGE_DFS_BACK;
          if (e->flags & EDGE_ABNORMAL)
            {
              bitmap_set_bit (has_abnormal_preds, block->index);
              break;
            }
        }

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
         ANTIC sets shrink and we restrict valid translations to
         those having operands with leaders in ANTIC.  Same below
         for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
        {
          if (bitmap_bit_p (changed_blocks, postorder[i]))
            {
              basic_block block = BASIC_BLOCK (postorder[i]);
              changed |= compute_antic_aux (block,
                                            bitmap_bit_p (has_abnormal_preds,
                                                          block->index));
            }
        }
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
                              num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Starting iteration %d\n", num_iterations);
          num_iterations++;
          changed = false;
          for (i = postorder_num - 1 ; i >= 0; i--)
            {
              if (bitmap_bit_p (changed_blocks, postorder[i]))
                {
                  basic_block block = BASIC_BLOCK (postorder[i]);
                  changed
                    |= compute_partial_antic_aux (block,
                                                  bitmap_bit_p (has_abnormal_preds,
                                                                block->index));
                }
            }
          /* Theoretically possible, but *highly* unlikely.  */
          gcc_checking_assert (num_iterations < 500);
        }
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
                                  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
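/* Note that both fixpoint loops above visit the blocks in reverse
   postorder and only revisit a block whose bit is still set in
   CHANGED_BLOCKS, which compute_antic_aux/compute_partial_antic_aux
   maintain; this keeps the per-iteration work proportional to the
   blocks whose sets actually changed.  */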
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;
/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
                                  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
                                         *operand);
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
        tree folded, sc = NULL_TREE;
        unsigned int nargs = 0;
        tree fn, *args;

        if (TREE_CODE (currop->op0) == FUNCTION_DECL)
          fn = currop->op0;
        else
          fn = find_or_generate_expression (block, currop->op0, stmts);
        if (currop->op1)
          sc = find_or_generate_expression (block, currop->op1, stmts);
        args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
                                          ref->operands) - 1);
        while (*operand < VEC_length (vn_reference_op_s, ref->operands))
          {
            args[nargs] = create_component_ref_by_pieces_1 (block, ref,
                                                            operand, stmts);
            nargs++;
          }
        folded = build_call_array (currop->type,
                                   (TREE_CODE (fn) == FUNCTION_DECL
                                    ? build_fold_addr_expr (fn) : fn),
                                   nargs, args);
        free (args);
        if (sc)
          CALL_EXPR_STATIC_CHAIN (folded) = sc;
        return folded;
      }

    case MEM_REF:
      {
        tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        tree offset = currop->op0;
        if (TREE_CODE (baseop) == ADDR_EXPR
            && handled_component_p (TREE_OPERAND (baseop, 0)))
          {
            HOST_WIDE_INT off;
            tree base;
            base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
                                                  &off);
            gcc_assert (base);
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     off));
            baseop = build_fold_addr_expr (base);
          }
        return fold_build2 (MEM_REF, currop->type, baseop, offset);
      }

    case TARGET_MEM_REF:
      {
        tree genop0 = NULL_TREE, genop1 = NULL_TREE;
        vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
                                               ++*operand);
        tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (currop->op0)
          genop0 = find_or_generate_expression (block, currop->op0, stmts);
        if (nextop->op0)
          genop1 = find_or_generate_expression (block, nextop->op0, stmts);
        return build5 (TARGET_MEM_REF, currop->type,
                       baseop, currop->op2, genop0, currop->op1, genop1);
      }

    case ADDR_EXPR:
      if (currop->op0)
        {
          gcc_assert (is_gimple_min_invariant (currop->op0));
          return currop->op0;
        }
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref,
                                                        operand, stmts);
        return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
        return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        tree op1 = currop->op0;
        tree op2 = currop->op1;
        return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
         is op0 of the reference op and operand 3 of the array ref is
         op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
        tree genop0;
        tree genop1 = currop->op0;
        tree genop2 = currop->op1;
        tree genop3 = currop->op2;
        genop0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
        genop1 = find_or_generate_expression (block, genop1, stmts);
        if (genop2)
          {
            tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
            /* Drop zero minimum index if redundant.  */
            if (integer_zerop (genop2)
                && (!domain_type
                    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
              genop2 = NULL_TREE;
            else
              genop2 = find_or_generate_expression (block, genop2, stmts);
          }
        if (genop3)
          {
            tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
            /* We can't always put a size in units of the element alignment
               here as the element alignment may be not visible.  See
               PR43783.  Simply drop the element size for constant
               sizes.  */
            if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
              genop3 = NULL_TREE;
            else
              {
                genop3 = size_binop (EXACT_DIV_EXPR, genop3,
                                     size_int (TYPE_ALIGN_UNIT (elmt_type)));
                genop3 = find_or_generate_expression (block, genop3, stmts);
              }
          }
        return build4 (currop->opcode, currop->type, genop0, genop1,
                       genop2, genop3);
      }

    case COMPONENT_REF:
      {
        tree op0;
        tree op1;
        tree genop2 = currop->op1;
        op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
        /* op1 should be a FIELD_DECL, which are represented by themselves.  */
        op1 = currop->op0;
        if (genop2)
          genop2 = find_or_generate_expression (block, genop2, stmts);
        return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
        genop = find_or_generate_expression (block, currop->op0, stmts);
        return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no-no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
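/* Purely as an illustration: for a reference such as a.x[i_1], the
   operands vector is walked from the outermost vn_reference_op inwards.
   The ARRAY_REF piece recurses through *OPERAND to rebuild the
   COMPONENT_REF a.x below it, while leaf operands such as the index
   i_1 go through find_or_generate_expression, which may in turn append
   new statements to STMTS if no leader is available yet.  */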
/* Find a leader for an expression, or generate one using
   create_expression_by_pieces if it's ANTIC but
   complex.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   STMTS is the statement list to put the inserted expressions on.
   Returns the SSA_NAME of the LHS of the generated expression or the
   leader.
   DOMSTMT if non-NULL is a statement that should be dominated by
   all uses in the generated expression.  If DOMSTMT is non-NULL this
   routine can fail and return NULL_TREE.  Otherwise it will assert
   on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
        return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (leader);
    }

  /* It must be a complex expression, so generate it recursively.  */
  bitmap exprset = VEC_index (bitmap, value_expressions, lookfor);
  unsigned int i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      if (temp->kind != NAME)
        return create_expression_by_pieces (block, temp, stmts,
                                            get_expr_type (expr));
    }

  gcc_unreachable ();
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   If DOMSTMT is non-NULL then we make sure that all uses in the
   expressions dominate that statement.  In this case the function
   can return NULL_TREE to signal failure.  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
                             gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gimple newstmt;

  switch (expr->kind)
    {
      /* We may hit the NAME/CONSTANT case if we have to convert types
         that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        folded = create_component_ref_by_pieces (block, ref, stmts);
      }
      break;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        tree *genop = XALLOCAVEC (tree, nary->length);
        unsigned i;
        for (i = 0; i < nary->length; ++i)
          {
            genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
            /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
               may have conversions stripped.  */
            if (nary->opcode == POINTER_PLUS_EXPR)
              {
                if (i == 0)
                  genop[i] = fold_convert (nary->type, genop[i]);
                else if (i == 1)
                  genop[i] = convert_to_ptrofftype (genop[i]);
              }
            else
              genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
          }
        if (nary->opcode == CONSTRUCTOR)
          {
            VEC(constructor_elt,gc) *elts = NULL;
            for (i = 0; i < nary->length; ++i)
              CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
            folded = build_constructor (nary->type, elts);
          }
        else
          {
            switch (nary->length)
              {
              case 1:
                folded = fold_build1 (nary->opcode, nary->type,
                                      genop[0]);
                break;
              case 2:
                folded = fold_build2 (nary->opcode, nary->type,
                                      genop[0], genop[1]);
                break;
              case 3:
                folded = fold_build3 (nary->opcode, nary->type,
                                      genop[0], genop[1], genop[2]);
                break;
              default:
                gcc_unreachable ();
              }
          }
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
    folded = fold_convert (exprtype, folded);

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
                                 false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree forcedname = gimple_get_lhs (stmt);

          if (TREE_CODE (forcedname) == SSA_NAME)
            {
              bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
              VN_INFO_GET (forcedname)->valnum = forcedname;
              VN_INFO (forcedname)->value_id = get_next_value_id ();
              nameexpr = get_or_alloc_expr_for_name (forcedname);
              add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
              bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
              bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
            }
        }
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
  newstmt = gimple_build_assign (name, folded);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
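/* The fresh "pretmp" SSA name created above carries the value-id of the
   expression it computes and is entered into NEW_SETS and AVAIL_OUT, so
   insertions in dominated blocks and the later elimination walk see it
   as an available leader for that value.  */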
/* Returns true if we want to inhibit the insertions of PHI nodes
   for the given EXPR for basic block BB (a member of a loop).
   We want to do this, when we fear that the induction variable we
   create might inhibit vectorization.  */

static bool
inhibit_phi_insertion (basic_block bb, pre_expr expr)
{
  vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
  VEC (vn_reference_op_s, heap) *ops = vr->operands;
  vn_reference_op_t op;
  unsigned i;

  /* If we aren't going to vectorize we don't inhibit anything.  */
  if (!flag_tree_vectorize)
    return false;

  /* Otherwise we inhibit the insertion when the address of the
     memory reference is a simple induction variable.  In other
     cases the vectorizer won't do anything anyway (either it's
     loop invariant or a complicated expression).  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          /* Calls are not a problem.  */
          return false;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (TREE_CODE (op->op0) != SSA_NAME)
            break;
          /* Fallthru.  */
        case SSA_NAME:
          {
            basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
            affine_iv iv;
            /* Default defs are loop invariant.  */
            if (!defbb)
              break;
            /* Defined outside this loop, also loop invariant.  */
            if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
              break;
            /* If it's a simple induction variable inhibit insertion,
               the vectorizer might be interested in this one.  */
            if (simple_iv (bb->loop_father, bb->loop_father,
                           op->op0, &iv, true))
              return true;
            /* No simple IV, vectorizer can't do anything, hence no
               reason to inhibit the transformation for this operand.  */
            break;
          }
        default:
          break;
        }
    }
  return false;
}
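/* For example (illustrative only): for a load a[i_1] in a vectorizable
   loop whose index i_1 is a simple induction variable, hoisting the load
   and merging it with a PHI would turn the access into a new IV chain;
   the simple_iv check above detects that case and we skip the PHI
   insertion so the access stays in a form the vectorizer recognizes.  */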
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
                            VEC(pre_expr, heap) *avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gimple phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
                                               EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
                                                EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
          && (expr->kind != REFERENCE
              || inhibit_phi_insertion (block, expr)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
          nophi = true;
        }
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = VEC_index (pre_expr, avail, pred->dest_idx);

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
        {
          builtexpr = create_expression_by_pieces (bprime, eprime,
                                                   &stmts, type);
          gcc_assert (!(pred->flags & EDGE_ABNORMAL));
          gsi_insert_seq_on_edge (pred, stmts);
          VEC_replace (pre_expr, avail, pred->dest_idx,
                       get_or_alloc_expr_for_name (builtexpr));
          insertions = true;
        }
      else if (eprime->kind == CONSTANT)
        {
          /* Constants may not have the right type, fold_convert
             should give us back a constant with the right type.  */
          tree constant = PRE_EXPR_CONSTANT (eprime);
          if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
            {
              tree builtexpr = fold_convert (type, constant);
              if (!is_gimple_min_invariant (builtexpr))
                {
                  tree forcedexpr = force_gimple_operand (builtexpr,
                                                          &stmts, false,
                                                          NULL);
                  if (!is_gimple_min_invariant (forcedexpr))
                    {
                      if (forcedexpr != builtexpr)
                        {
                          VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
                          VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
                        }
                      if (stmts)
                        {
                          gimple_stmt_iterator gsi;
                          gsi = gsi_start (stmts);
                          for (; !gsi_end_p (gsi); gsi_next (&gsi))
                            {
                              gimple stmt = gsi_stmt (gsi);
                              tree lhs = gimple_get_lhs (stmt);
                              if (TREE_CODE (lhs) == SSA_NAME)
                                bitmap_set_bit (inserted_exprs,
                                                SSA_NAME_VERSION (lhs));
                              gimple_set_plf (stmt, NECESSARY, false);
                            }
                          gsi_insert_seq_on_edge (pred, stmts);
                        }
                      VEC_replace (pre_expr, avail, pred->dest_idx,
                                   get_or_alloc_expr_for_name (forcedexpr));
                    }
                }
              else
                VEC_replace (pre_expr, avail, pred->dest_idx,
                             get_or_alloc_expr_for_constant (builtexpr));
            }
        }
      else if (eprime->kind == NAME)
        {
          /* We may have to do a conversion because our value
             numbering can look through types in certain cases, but
             our IL requires all operands of a phi node have the same
             type.  */
          tree name = PRE_EXPR_NAME (eprime);
          if (!useless_type_conversion_p (type, TREE_TYPE (name)))
            {
              tree builtexpr;
              tree forcedexpr;
              builtexpr = fold_convert (type, name);
              forcedexpr = force_gimple_operand (builtexpr,
                                                 &stmts, false,
                                                 NULL);

              if (forcedexpr != name)
                {
                  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
                  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
                }

              if (stmts)
                {
                  gimple_stmt_iterator gsi;
                  gsi = gsi_start (stmts);
                  for (; !gsi_end_p (gsi); gsi_next (&gsi))
                    {
                      gimple stmt = gsi_stmt (gsi);
                      tree lhs = gimple_get_lhs (stmt);
                      if (TREE_CODE (lhs) == SSA_NAME)
                        bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
                      gimple_set_plf (stmt, NECESSARY, false);
                    }
                  gsi_insert_seq_on_edge (pred, stmts);
                }
              VEC_replace (pre_expr, avail, pred->dest_idx,
                           get_or_alloc_expr_for_name (forcedexpr));
            }
        }
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  VN_INFO_GET (temp)->value_id = val;
  VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = VEC_index (pre_expr, avail, pred->dest_idx);
      gcc_assert (get_expr_type (ae) == type
                  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
        add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
      else
        add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it does not exist in AVAIL_OUT, we want it
     added.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
                               newphi);
  bitmap_insert_into_set (NEW_SETS (block),
                          newphi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d\n", block->index);
    }
  pre_stats.phis++;
  return true;
}
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
           any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
           the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
           NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_insertion.  */
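/* As a concrete illustration: in a diamond CFG where one arm computes
   a + b and the join block computes a + b again, the expression is ANTIC
   in the join but available from only one predecessor.  Step 2b inserts
   a + b on the other incoming edge, step 2c merges the two names with a
   "prephitmp" PHI, and the later elimination walk replaces the original
   computation in the join with that PHI result.  */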
static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  VEC (pre_expr, heap) *avail = NULL;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (expr->kind != NAME)
        {
          unsigned int val;
          bool by_some = false;
          bool cant_insert = false;
          bool all_same = true;
          pre_expr first_s = NULL;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;
          pre_expr edoubleprime = NULL;
          bool do_insertion = false;

          val = get_expr_value_id (expr);
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Found fully redundant value\n");
              continue;
            }

          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              eprime = phi_translate (expr, ANTIC_IN (block), NULL,
                                      bprime, block);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
                  cant_insert = true;
                  break;
                }

              eprime = fully_constant_expression (eprime);
              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
                                                 vprime);
              if (edoubleprime == NULL)
                {
                  VEC_replace (pre_expr, avail, pred->dest_idx, eprime);
                  all_same = false;
                }
              else
                {
                  VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
                  by_some = true;
                  /* We want to perform insertions to remove a redundancy on
                     a path in the CFG we want to optimize for speed.  */
                  if (optimize_edge_for_speed_p (pred))
                    do_insertion = true;
                  if (first_s == NULL)
                    first_s = edoubleprime;
                  else if (!pre_expr_d::equal (first_s, edoubleprime))
                    all_same = false;
                }
            }
          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && !all_same && by_some)
            {
              if (!do_insertion)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Skipping partial redundancy for "
                               "expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d), no redundancy on to be "
                               "optimized for speed edge\n", val);
                    }
                }
              else if (dbg_cnt (treepre_insert))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Found partial redundancy for "
                               "expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d)\n",
                               get_expr_value_id (expr));
                    }
                  if (insert_into_preds_of_block (block,
                                                  get_expression_id (expr),
                                                  avail))
                    new_stuff = true;
                }
            }
          /* If all edges produce the same value and that value is
             an invariant, then the PHI has the same value on all
             edges.  Note this.  */
          else if (!cant_insert && all_same && eprime
                   && (edoubleprime->kind == CONSTANT
                       || edoubleprime->kind == NAME)
                   && !value_id_constant_p (val))
            {
              unsigned int j;
              bitmap_iterator bi;
              bitmap exprset = VEC_index (bitmap, value_expressions, val);

              unsigned int new_val = get_expr_value_id (edoubleprime);
              EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bi)
                {
                  pre_expr expr = expression_for_id (j);

                  if (expr->kind == NAME)
                    {
                      vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
                      /* Just reset the value id and valnum so it is
                         the same as the constant we have discovered.  */
                      if (edoubleprime->kind == CONSTANT)
                        {
                          info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
                          pre_stats.constified++;
                        }
                      else
                        info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
                      info->value_id = new_val;
                    }
                }
            }
        }
    }

  VEC_free (pre_expr, heap, exprs);
  VEC_free (pre_expr, heap, avail);
  return new_stuff;
}
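/* Note the final arm above: when every predecessor already provides the
   same constant (or the same name) for the value, no PHI is needed at
   all; the value-infos of the expressions for that value are simply
   redirected to the discovered leader, counted in pre_stats.constified
   for the constant case.  */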
/* Perform insertion for partially anticipatable expressions.  There
   is only one case in which we will perform insertion for these:  the
   expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  VEC (pre_expr, heap) *avail = NULL;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (expr->kind != NAME)
        {
          unsigned int val;
          bool by_all = true;
          bool cant_insert = false;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;

          val = get_expr_value_id (expr);
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            continue;

          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;
              pre_expr edoubleprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              eprime = phi_translate (expr, ANTIC_IN (block),
                                      PA_IN (block),
                                      bprime, block);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
                  cant_insert = true;
                  break;
                }

              eprime = fully_constant_expression (eprime);
              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
              VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
              if (edoubleprime == NULL)
                {
                  by_all = false;
                  break;
                }
            }

          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && by_all)
            {
              edge succ;
              bool do_insertion = false;

              /* Insert only if we can remove a later expression on a path
                 that we want to optimize for speed.
                 The phi node that we will be inserting in BLOCK is not free,
                 and inserting it for the sake of !optimize_for_speed successor
                 may cause regressions on the speed path.  */
              FOR_EACH_EDGE (succ, ei, block->succs)
                {
                  if (bitmap_set_contains_value (PA_IN (succ->dest), val))
                    {
                      if (optimize_edge_for_speed_p (succ))
                        do_insertion = true;
                    }
                }

              if (!do_insertion)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Skipping partial partial redundancy "
                               "for expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d), not partially anticipated "
                               "on any to be optimized for speed edges\n", val);
                    }
                }
              else if (dbg_cnt (treepre_insert))
                {
                  pre_stats.pa_insert++;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Found partial partial redundancy "
                               "for expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d)\n",
                               get_expr_value_id (expr));
                    }
                  if (insert_into_preds_of_block (block,
                                                  get_expression_id (expr),
                                                  avail))
                    new_stuff = true;
                }
            }
        }
    }

  VEC_free (pre_expr, heap, exprs);
  VEC_free (pre_expr, heap, avail);
  return new_stuff;
}
static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
        {
          unsigned i;
          bitmap_iterator bi;
          bitmap_set_t newset = NEW_SETS (dom);
          if (newset)
            {
              /* Note that we need to value_replace both NEW_SETS, and
                 AVAIL_OUT. For both the case of NEW_SETS, the value may be
                 represented by some non-simple expression here that we want
                 to replace it with.  */
              FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
                {
                  pre_expr expr = expression_for_id (i);
                  bitmap_value_replace_in_set (NEW_SETS (block), expr);
                  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
                }
            }
          if (!single_pred_p (block))
            {
              new_stuff |= do_regular_insertion (block, dom);
              if (do_partial_partial)
                new_stuff |= do_partial_partial_insertion (block, dom);
            }
        }
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    new_stuff |= insert_aux (son);

  return new_stuff;
}

/* Perform insertion of partially redundant values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB (bb)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
        fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}
/* Add OP to EXP_GEN (block), and possibly to the maximal set.  */

static void
add_to_exp_gen (basic_block block, tree op)
{
  pre_expr result;

  if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
    return;

  result = get_or_alloc_expr_for_name (op);
  bitmap_value_insert_into_set (EXP_GEN (block), result);
}

/* Create value ids for PHI in BLOCK.  */

static void
make_values_for_phi (gimple phi, basic_block block)
{
  tree result = gimple_phi_result (phi);
  unsigned i;

  /* We have no need for virtual phis, as they don't represent
     actual computations.  */
  if (virtual_operand_p (result))
    return;

  pre_expr e = get_or_alloc_expr_for_name (result);
  add_to_value (get_expr_value_id (e), e);
  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
  bitmap_insert_into_set (PHI_GEN (block), e);
  for (i = 0; i < gimple_phi_num_args (phi); ++i)
    {
      tree arg = gimple_phi_arg_def (phi, i);
      if (TREE_CODE (arg) == SSA_NAME)
        {
          e = get_or_alloc_expr_for_name (arg);
          add_to_value (get_expr_value_id (e), e);
        }
    }
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
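/* Because the walk below proceeds over the dominator tree, AVAIL_IN of a
   block never has to be recomputed separately: it is exactly AVAIL_OUT of
   the immediate dominator, which has necessarily been finished before any
   of its dominated blocks are taken off the worklist.  */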
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
          || !SSA_NAME_IS_DEFAULT_DEF (name)
          || has_zero_uses (name)
          || virtual_operand_p (name))
        continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple_stmt_iterator gsi;
      gimple stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
         its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
        bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      /* Generate values for PHI nodes.  */
      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
        make_values_for_phi (gsi_stmt (gsi), block);

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);

          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
          if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return or have side-effects
                 that forbid hoisting possibly trapping expressions
                 before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (stmt)
              || is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            add_to_exp_gen (block, op);

          switch (gimple_code (stmt))
            {
            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                pre_expr result = NULL;
                VEC(vn_reference_op_s, heap) *ops = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                copy_reference_ops_from_call (stmt, &ops);
                vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
                                            gimple_expr_type (stmt),
                                            ops, &ref, VN_NOWALK);
                VEC_free (vn_reference_op_s, heap, ops);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                  (gimple_vuse (stmt))) != block)
                  {
                    result = (pre_expr) pool_alloc (pre_expr_pool);
                    result->kind = REFERENCE;
                    result->id = 0;
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return avoid
                         adding the nary to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }

                  case VN_REFERENCE:
                    {
                      vn_reference_t ref;
                      vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                           gimple_vuse (stmt),
                                           VN_WALK, &ref);
                      if (!ref)
                        continue;

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            continue;
                        }

                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            default:
              break;
            }
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}
/* Local state for the eliminate domwalk.  */
static VEC (gimple, heap) *el_to_remove;
static VEC (gimple, heap) *el_to_update;
static unsigned int el_todo;
static VEC (tree, heap) *el_avail;
static VEC (tree, heap) *el_avail_stack;

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (VEC_length (tree, el_avail) > SSA_NAME_VERSION (valnum))
        return VEC_index (tree, el_avail, SSA_NAME_VERSION (valnum));
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (VEC_length (tree, el_avail) <= SSA_NAME_VERSION (valnum))
        VEC_safe_grow_cleared (tree, heap,
                               el_avail, SSA_NAME_VERSION (valnum) + 1);
      VEC_replace (tree, el_avail, SSA_NAME_VERSION (valnum), op);
      VEC_safe_push (tree, heap, el_avail_stack, op);
    }
}

/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gimple tem = gimple_build_assign (res,
                                    fold_build1 (TREE_CODE (expr),
                                                 TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}
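/* EL_AVAIL above is indexed by the SSA version of a value number and holds
   the leader valid on the current dominator-tree path; EL_AVAIL_STACK
   records the names pushed while visiting each block (separated by
   NULL_TREE markers) so that eliminate_leave_block can pop them again
   when the domwalk leaves the block.  */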
4017 /* Perform elimination for the basic-block B during the domwalk. */
4020 eliminate_bb (dom_walk_data
*, basic_block b
)
4022 gimple_stmt_iterator gsi
;
4026 VEC_safe_push (tree
, heap
, el_avail_stack
, NULL_TREE
);
4028 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4030 gimple stmt
, phi
= gsi_stmt (gsi
);
4031 tree sprime
= NULL_TREE
, res
= PHI_RESULT (phi
);
4032 gimple_stmt_iterator gsi2
;
4034 /* We want to perform redundant PHI elimination. Do so by
4035 replacing the PHI with a single copy if possible.
4036 Do not touch inserted, single-argument or virtual PHIs. */
4037 if (gimple_phi_num_args (phi
) == 1
4038 || virtual_operand_p (res
))
4044 sprime
= eliminate_avail (res
);
4048 eliminate_push_avail (res
);
4052 else if (is_gimple_min_invariant (sprime
))
4054 if (!useless_type_conversion_p (TREE_TYPE (res
),
4055 TREE_TYPE (sprime
)))
4056 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4059 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4061 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4062 print_generic_expr (dump_file
, res
, 0);
4063 fprintf (dump_file
, " with ");
4064 print_generic_expr (dump_file
, sprime
, 0);
4065 fprintf (dump_file
, "\n");
4068 remove_phi_node (&gsi
, false);
4071 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4072 && TREE_CODE (sprime
) == SSA_NAME
)
4073 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4075 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4076 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4077 stmt
= gimple_build_assign (res
, sprime
);
4078 SSA_NAME_DEF_STMT (res
) = stmt
;
4079 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4081 gsi2
= gsi_after_labels (b
);
4082 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4083 /* Queue the copy for eventual removal. */
4084 VEC_safe_push (gimple
, heap
, el_to_remove
, stmt
);
4085 /* If we inserted this PHI node ourself, it's not an elimination. */
4087 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4090 pre_stats
.eliminations
++;
4093 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4095 tree lhs
= NULL_TREE
;
4096 tree rhs
= NULL_TREE
;
4098 stmt
= gsi_stmt (gsi
);
4100 if (gimple_has_lhs (stmt
))
4101 lhs
= gimple_get_lhs (stmt
);
4103 if (gimple_assign_single_p (stmt
))
4104 rhs
= gimple_assign_rhs1 (stmt
);
4106 /* Lookup the RHS of the expression, see if we have an
4107 available computation for it. If so, replace the RHS with
4108 the available computation.
4111 We don't replace global register variable when it is a the RHS of
4112 a single assign. We do replace local register variable since gcc
4113 does not guarantee local variable will be allocated in register. */
4114 if (gimple_has_lhs (stmt
)
4115 && TREE_CODE (lhs
) == SSA_NAME
4116 && !gimple_assign_ssa_name_copy_p (stmt
)
4117 && (!gimple_assign_single_p (stmt
)
4118 || (!is_gimple_min_invariant (rhs
)
4119 && (gimple_assign_rhs_code (stmt
) != VAR_DECL
4120 || !is_global_var (rhs
)
4121 || !DECL_HARD_REGISTER (rhs
))))
4122 && !gimple_has_volatile_ops (stmt
))
4125 gimple orig_stmt
= stmt
;
4127 sprime
= eliminate_avail (lhs
);
4130 /* If there is no existing usable leader but SCCVN thinks
4131 it has an expression it wants to use as replacement,
4133 tree val
= VN_INFO (lhs
)->valnum
;
4135 && TREE_CODE (val
) == SSA_NAME
4136 && VN_INFO (val
)->needs_insertion
4137 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4138 eliminate_push_avail (sprime
);
4140 else if (is_gimple_min_invariant (sprime
))
4142 /* If there is no existing leader but SCCVN knows this
4143 value is constant, use that constant. */
4144 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4145 TREE_TYPE (sprime
)))
4146 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4148 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4150 fprintf (dump_file
, "Replaced ");
4151 print_gimple_expr (dump_file
, stmt
, 0, 0);
4152 fprintf (dump_file
, " with ");
4153 print_generic_expr (dump_file
, sprime
, 0);
4154 fprintf (dump_file
, " in ");
4155 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4157 pre_stats
.eliminations
++;
4158 propagate_tree_value_into_stmt (&gsi
, sprime
);
4159 stmt
= gsi_stmt (gsi
);
4162 /* If we removed EH side-effects from the statement, clean
4163 its EH information. */
4164 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4166 bitmap_set_bit (need_eh_cleanup
,
4167 gimple_bb (stmt
)->index
);
4168 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4169 fprintf (dump_file
, " Removed EH side-effects.\n");
4174 /* If there is no usable leader mark lhs as leader for its value. */
4176 eliminate_push_avail (lhs
);
4180 && (rhs
== NULL_TREE
4181 || TREE_CODE (rhs
) != SSA_NAME
4182 || may_propagate_copy (rhs
, sprime
)))
4184 bool can_make_abnormal_goto
4185 = is_gimple_call (stmt
)
4186 && stmt_can_make_abnormal_goto (stmt
);
4188 gcc_assert (sprime
!= rhs
);
4190 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4192 fprintf (dump_file
, "Replaced ");
4193 print_gimple_expr (dump_file
, stmt
, 0, 0);
4194 fprintf (dump_file
, " with ");
4195 print_generic_expr (dump_file
, sprime
, 0);
4196 fprintf (dump_file
, " in ");
4197 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4200 if (TREE_CODE (sprime
) == SSA_NAME
)
4201 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4203 /* We need to make sure the new and old types actually match,
4204 which may require adding a simple cast, which fold_convert
4206 if ((!rhs
|| TREE_CODE (rhs
) != SSA_NAME
)
4207 && !useless_type_conversion_p (gimple_expr_type (stmt
),
4208 TREE_TYPE (sprime
)))
4209 sprime
= fold_convert (gimple_expr_type (stmt
), sprime
);
4211 pre_stats
.eliminations
++;
4212 propagate_tree_value_into_stmt (&gsi
, sprime
);
4213 stmt
= gsi_stmt (gsi
);
4216 /* If we removed EH side-effects from the statement, clean
4217 its EH information. */
4218 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4220 bitmap_set_bit (need_eh_cleanup
,
4221 gimple_bb (stmt
)->index
);
4222 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4223 fprintf (dump_file
, " Removed EH side-effects.\n");
4226 /* Likewise for AB side-effects. */
4227 if (can_make_abnormal_goto
4228 && !stmt_can_make_abnormal_goto (stmt
))
4230 bitmap_set_bit (need_ab_cleanup
,
4231 gimple_bb (stmt
)->index
);
4232 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4233 fprintf (dump_file
, " Removed AB side-effects.\n");
4237 /* If the statement is a scalar store, see if the expression
4238 has the same value number as its rhs. If so, the store is
4240 else if (gimple_assign_single_p (stmt
)
4241 && !gimple_has_volatile_ops (stmt
)
4242 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4243 && (TREE_CODE (rhs
) == SSA_NAME
4244 || is_gimple_min_invariant (rhs
)))
4247 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4248 gimple_vuse (stmt
), VN_WALK
, NULL
);
4249 if (TREE_CODE (rhs
) == SSA_NAME
)
4250 rhs
= VN_INFO (rhs
)->valnum
;
4252 && operand_equal_p (val
, rhs
, 0))
4254 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4256 fprintf (dump_file
, "Deleted redundant store ");
4257 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4260 /* Queue stmt for removal. */
4261 VEC_safe_push (gimple
, heap
, el_to_remove
, stmt
);
4264 /* Visit COND_EXPRs and fold the comparison with the
4265 available value-numbers. */
4266 else if (gimple_code (stmt
) == GIMPLE_COND
)
4268 tree op0
= gimple_cond_lhs (stmt
);
4269 tree op1
= gimple_cond_rhs (stmt
);
4272 if (TREE_CODE (op0
) == SSA_NAME
)
4273 op0
= VN_INFO (op0
)->valnum
;
4274 if (TREE_CODE (op1
) == SSA_NAME
)
4275 op1
= VN_INFO (op1
)->valnum
;
4276 result
= fold_binary (gimple_cond_code (stmt
), boolean_type_node
,
4278 if (result
&& TREE_CODE (result
) == INTEGER_CST
)
4280 if (integer_zerop (result
))
4281 gimple_cond_make_false (stmt
);
4283 gimple_cond_make_true (stmt
);
4285 el_todo
= TODO_cleanup_cfg
;
      /* Visit indirect calls and turn them into direct calls if
         possible.  */
      if (is_gimple_call (stmt))
        {
          tree orig_fn = gimple_call_fn (stmt);
          tree fn;
          if (!orig_fn)
            continue;
          if (TREE_CODE (orig_fn) == SSA_NAME)
            fn = VN_INFO (orig_fn)->valnum;
          else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
            fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
          else
            continue;
          if (gimple_call_addr_fndecl (fn) != NULL_TREE
              && useless_type_conversion_p (TREE_TYPE (orig_fn),
                                            TREE_TYPE (fn)))
            {
              bool can_make_abnormal_goto
                = stmt_can_make_abnormal_goto (stmt);
              bool was_noreturn = gimple_call_noreturn_p (stmt);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replacing call target with ");
                  print_generic_expr (dump_file, fn, 0);
                  fprintf (dump_file, " in ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              gimple_call_set_fn (stmt, fn);
              VEC_safe_push (gimple, heap, el_to_update, stmt);

              /* When changing a call into a noreturn call, cfg cleanup
                 is needed to fix up the noreturn call.  */
              if (!was_noreturn && gimple_call_noreturn_p (stmt))
                el_todo |= TODO_cleanup_cfg;

              /* If we removed EH side-effects from the statement, clean
                 its EH information.  */
              if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                {
                  bitmap_set_bit (need_eh_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed EH side-effects.\n");
                }

              /* Likewise for AB side-effects.  */
              if (can_make_abnormal_goto
                  && !stmt_can_make_abnormal_goto (stmt))
                {
                  bitmap_set_bit (need_ab_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed AB side-effects.\n");
                }

              /* Changing an indirect call to a direct call may
                 have exposed different semantics.  This may
                 require an SSA update.  */
              el_todo |= TODO_update_ssa_only_virtuals;
            }
        }
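      /* The devirtualization above turns, for example, an indirect call
         "fn_3 (x_1);" whose callee fn_3 has the value-number &foo into the
         direct call "foo (x_1);", provided the function types are
         compatible (an illustrative example, not taken from a dump).  */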
    }
}

/* Make no longer available leaders no longer available.  */
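/* The dominator walk is assumed to push a NULL_TREE marker onto
   el_avail_stack when it enters a block (in eliminate_bb, the
   before_dom_children hook); popping back to that marker here drops the
   leaders that became available only while inside this block.  */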
static void
eliminate_leave_block (dom_walk_data *, basic_block)
{
  tree entry;
  while ((entry = VEC_pop (tree, el_avail_stack)) != NULL_TREE)
    VEC_replace (tree, el_avail,
                 SSA_NAME_VERSION (VN_INFO (entry)->valnum), NULL_TREE);
}
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  struct dom_walk_data walk_data;
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove = NULL;
  el_to_update = NULL;
  el_todo = 0;
  el_avail = NULL;
  el_avail_stack = NULL;

  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = eliminate_bb;
  walk_data.after_dom_children = eliminate_leave_block;
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  VEC_free (tree, heap, el_avail);
  VEC_free (tree, heap, el_avail_stack);
  /* We cannot remove stmts during the BB walk, especially not release SSA
     names there, as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (gimple, el_to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
         instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (rhs) == SSA_NAME
          && single_imm_use (lhs, &use_p, &use_stmt)
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
        {
          SET_USE (use_p, rhs);
          update_stmt (use_stmt);
          if (inserted_exprs
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
              && TREE_CODE (rhs) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
        }

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
          || has_zero_uses (lhs))
        {
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (inserted_exprs
              && TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
        }
    }
  VEC_free (gimple, heap, el_to_remove);
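  /* For illustration (a hypothetical case): when a queued copy "x_5 = y_2;"
     has exactly one remaining use of x_5, the loop above rewrites that use
     to y_2 and then removes the copy, which now has zero uses; queued
     redundant stores are removed unconditionally since their lhs is not an
     SSA name.  */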
  /* We cannot update call statements with virtual operands during the
     SSA walk, as this might remove them, which in turn makes our
     VN lattice invalid.  */
  FOR_EACH_VEC_ELT (gimple, el_to_update, i, stmt)
    update_stmt (stmt);
  VEC_free (gimple, heap, el_to_update);

  return el_todo;
}
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */
/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }
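  /* For illustration (an assumed scenario): if insert () created
     "t_7 = a_1 + b_2;" but elimination never used t_7 in a statement that
     survived, its definition is never marked NECESSARY by the propagation
     above, even if another dead inserted statement refers to it, and it is
     removed by the sweep below.  */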
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }

  BITMAP_FREE (worklist);
}
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap, heap, value_expressions,
                         get_max_value_id () + 1);
  name_to_id = NULL;

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks);
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table.create (5110);
  expression_to_id.create (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre (void)
{
  free (postorder);
  VEC_free (bitmap, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  phi_translate_table.dispose ();
  expression_to_id.dispose ();
  VEC_free (unsigned, heap, name_to_id);

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();
  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
        {
          print_bitmap_set (dump_file, EXP_GEN (bb),
                            "exp_gen", bb->index);
          print_bitmap_set (dump_file, PHI_GEN (bb),
                            "phi_gen", bb->index);
          print_bitmap_set (dump_file, TMP_GEN (bb),
                            "tmp_gen", bb->index);
          print_bitmap_set (dump_file, AVAIL_OUT (bb),
                            "avail_out", bb->index);
        }
    }
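  /* In a -fdump-tree-pre-details dump, each of the lines printed above is
     expected to look roughly like "exp_gen[3] := { ... }", i.e. the set name
     followed by the block index and the expressions in the set (illustrative
     format, not an exact transcript).  */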
  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }
  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  clear_expression_ids ();
  remove_dead_inserted_code ();
  todo |= TODO_verify_flow;

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();
  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     To handle that we could either
     - call merge_blocks after each tail merge iteration,
     - call merge_blocks after all tail merge iterations,
     - mark TODO_cleanup_cfg when necessary, or
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, and together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}
struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  return todo;
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}
struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};