/* Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* config.h and system.h must come first in every GCC source file.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "plugin-api.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
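/* Illustrative example (added commentary, not part of the original
   description; the SSA names and temporaries are made up): on a CFG
   diamond a computation available along only one predecessor is made
   fully redundant by an insertion:

       if (c)                         if (c)
         x = a + b;                     x = a + b;
       else                   =>      else
         ;                              t = a + b;     <- insertion
       y = a + b;                     y = PHI (x, t);  <- elimination

   "a + b" is AVAIL out of the then-arm only (condition 1 above) and
   ANTIC in both predecessors (condition 2), so inserting it into the
   else-arm and wiring up a PHI lets the final elimination walk replace
   the second computation with the PHI result.  */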
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
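/* Illustration (added commentary; names are made up): if a_1, b_2 and
   the expression a_1 + 1 all compute the same value, SCCVN gives them
   one value number whose representative might be a_1, and PRE
   additionally assigns a small dense value_id, say 5, so the value can
   be stored as bit 5 in the bitmap sets below without materializing
   any extra SSA_NAMEs.  */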
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
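/* Illustrative sketch (added commentary; the ids are made up): a set
   containing expressions e7 and e12, both having value-id 3, is stored
   as

     expressions = { 7, 12 }     (bitmap of expression ids)
     values      = { 3 }         (bitmap of value ids)

   so value-based queries ("does the set contain value 3?") and
   expression-based iteration are both plain bitmap operations.  */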
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};
typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
        return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}
static alloc_pool pre_expr_pool;
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
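/* Usage sketch (added illustration, assuming a live bitmap_set_t SET):
   iterating all expressions of a set goes through the expression
   bitmap, e.g.

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       print_pre_expr (stderr, expression_for_id (i));

   while value-based membership queries go through the values bitmap
   via bitmap_set_contains_value below.  */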
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
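/* Added note: these accessors assume the per-block bb_bitmap_sets
   structure has been hung off BB->aux; a typical (illustrative) use is

     if (bitmap_set_contains_value (AVAIL_OUT (bb), val))
       ... the value is available on exit from BB ...  */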
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const value_type *ve1,
			  const compare_type *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    value_expressions.safe_grow_cleared (v + 1);

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}
/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}
static bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant lose for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }

  gcc_unreachable ();
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	unsigned int i;
	vn_reference_op_t vro;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}
void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As they all
	 do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  return result;
}
/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary->op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		if (!leader)
		  break;
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (!opresult)
		  break;
		if (opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    changed |= name != op[n];
		    op[n] = name;
		  }
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PREDs AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	bitmap_remove_from_set (set1, expr);
    }
  exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set, NULL, expr))
	bitmap_remove_from_set (set, expr);
    }
  exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  if (ref->vuse)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	      if (!gimple_nop_p (def_stmt)
		  && ((gimple_bb (def_stmt) != block
		       && !dominated_by_p (CDI_DOMINATORS,
					   block, gimple_bb (def_stmt)))
		      || (gimple_bb (def_stmt) == block
			  && value_dies_in_block_x (expr, block))))
		bitmap_remove_from_set (set, expr);
	    }
	}
      else if (expr->kind == NARY)
	{
	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	  /* If the NARY may trap make sure the block does not contain
	     a possible exit point.
	     ??? This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_nary_may_trap (nary))
	    bitmap_remove_from_set (set, expr);
	}
    }
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
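/* Worked example (added illustration; the sets are made up): if BLOCK
   has two successors S1 and S2 with ANTIC_IN[S1] = {a + b, x} and
   ANTIC_IN[S2] = {a + b}, the intersection gives
   ANTIC_OUT[BLOCK] = {a + b}.  If BLOCK itself computes a + b into t
   (so EXP_GEN contains a + b and TMP_GEN contains t), then
   ANTIC_IN[BLOCK] = clean({a + b} U {a + b} - {t}) = {a + b}.  */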
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);
      gcc_assert (BB_VISITED (succ_bb));
      phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      size_t i;
      basic_block bprime, first = NULL;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e->dest;
	  else if (BB_VISITED (e->dest))
	    worklist.quick_push (e->dest);
	}

      /* Of multiple successors we have to have visited one already
	 which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);

      FOR_EACH_VEC_ELT (worklist, i, bprime)
	{
	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	}
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block));

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (ANTIC_OUT)
	print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

      print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			block->index);

      if (S)
	print_bitmap_set (dump_file, S, "S", block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])
*/
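/* Illustrative contrast (added commentary): where ANTIC_OUT intersects
   the successors' ANTIC_IN sets, PA_OUT unions them, so a value that is
   anticipatable along only one successor still shows up as *partially*
   anticipatable and can later be considered by
   do_partial_partial_insertion.  */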
2244 compute_partial_antic_aux (basic_block block
,
2245 bool block_has_abnormal_pred_edge
)
2247 bool changed
= false;
2248 bitmap_set_t old_PA_IN
;
2249 bitmap_set_t PA_OUT
;
2252 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2254 old_PA_IN
= PA_OUT
= NULL
;
2256 /* If any edges from predecessors are abnormal, antic_in is empty,
2258 if (block_has_abnormal_pred_edge
)
2259 goto maybe_dump_sets
;
2261 /* If there are too many partially anticipatable values in the
2262 block, phi_translate_set can take an exponential time: stop
2263 before the translation starts. */
2265 && single_succ_p (block
)
2266 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2267 goto maybe_dump_sets
;
2269 old_PA_IN
= PA_IN (block
);
2270 PA_OUT
= bitmap_set_new ();
2272 /* If the block has no successors, ANTIC_OUT is empty. */
2273 if (EDGE_COUNT (block
->succs
) == 0)
2275 /* If we have one successor, we could have some phi nodes to
2276 translate through. Note that we can't phi translate across DFS
2277 back edges in partial antic, because it uses a union operation on
2278 the successors. For recurrences like IV's, we will end up
2279 generating a new value in the set on each go around (i + 3 (VH.1)
2280 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2281 else if (single_succ_p (block
))
2283 basic_block succ
= single_succ (block
);
2284 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2285 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2287 /* If we have multiple successors, we take the union of all of
2294 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2295 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2297 if (e
->flags
& EDGE_DFS_BACK
)
2299 worklist
.quick_push (e
->dest
);
2301 if (worklist
.length () > 0)
2303 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2308 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2309 bitmap_value_insert_into_set (PA_OUT
,
2310 expression_for_id (i
));
2311 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2313 bitmap_set_t pa_in
= bitmap_set_new ();
2314 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2315 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2316 bitmap_value_insert_into_set (PA_OUT
,
2317 expression_for_id (i
));
2318 bitmap_set_free (pa_in
);
2321 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2322 bitmap_value_insert_into_set (PA_OUT
,
2323 expression_for_id (i
));
2328 /* Prune expressions that are clobbered in block and thus become
2329 invalid if translated from PA_OUT to PA_IN. */
2330 prune_clobbered_mems (PA_OUT
, block
);
2332 /* PA_IN starts with PA_OUT - TMP_GEN.
2333 Then we subtract things from ANTIC_IN. */
2334 PA_IN (block
) = bitmap_set_subtract (PA_OUT
, TMP_GEN (block
));
2336 /* For partial antic, we want to put back in the phi results, since
2337 we will properly avoid making them partially antic over backedges. */
2338 bitmap_ior_into (&PA_IN (block
)->values
, &PHI_GEN (block
)->values
);
2339 bitmap_ior_into (&PA_IN (block
)->expressions
, &PHI_GEN (block
)->expressions
);
2341 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2342 bitmap_set_subtract_values (PA_IN (block
), ANTIC_IN (block
));
2344 dependent_clean (PA_IN (block
), ANTIC_IN (block
));
2346 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2349 bitmap_set_bit (changed_blocks
, block
->index
);
2350 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2351 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2354 bitmap_clear_bit (changed_blocks
, block
->index
);
2357 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2360 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2362 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2365 bitmap_set_free (old_PA_IN
);
2367 bitmap_set_free (PA_OUT
);
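/* Illustrative sketch (hypothetical CFG, not from any testcase): if
   BLOCK has two non-back-edge successors S1 and S2 with
   ANTIC_IN (S1) = {a + b} and ANTIC_IN (S2) = {c + d}, then unlike
   ANTIC_OUT, which would be the intersection and therefore empty,
   PA_OUT = {a + b, c + d}: each value is anticipatable along at least
   one successor path, which is all partial anticipation requires.  */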
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      bitmap_set_bit (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
	{
	  if (bitmap_bit_p (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
	      changed |= compute_antic_aux (block,
					    bitmap_bit_p (has_abnormal_preds,
							  block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = postorder_num - 1 ; i >= 0; i--)
	    {
	      if (bitmap_bit_p (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  bitmap_bit_p (has_abnormal_preds,
								block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_checking_assert (num_iterations < 500);
	}
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
				  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;
/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &ref->operands[*operand];
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
	tree folded, sc = NULL_TREE;
	unsigned int nargs = 0;
	tree fn, *args;

	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
	  fn = currop->op0;
	else
	  {
	    fn = find_or_generate_expression (block, currop->op0, stmts);
	    if (!fn)
	      return NULL_TREE;
	  }
	if (currop->op1)
	  {
	    sc = find_or_generate_expression (block, currop->op1, stmts);
	    if (!sc)
	      return NULL_TREE;
	  }
	args = XNEWVEC (tree, ref->operands.length () - 1);
	while (*operand < ref->operands.length ())
	  {
	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
							    operand, stmts);
	    if (!args[nargs])
	      return NULL_TREE;
	    nargs++;
	  }
	folded = build_call_array (currop->type,
				   (TREE_CODE (fn) == FUNCTION_DECL
				    ? build_fold_addr_expr (fn) : fn),
				   nargs, args);
	if (currop->with_bounds)
	  CALL_WITH_BOUNDS_P (folded) = true;
	free (args);
	if (sc)
	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
	return folded;
      }

    case MEM_REF:
      {
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	tree offset = currop->op0;
	if (TREE_CODE (baseop) == ADDR_EXPR
	    && handled_component_p (TREE_OPERAND (baseop, 0)))
	  {
	    HOST_WIDE_INT off;
	    tree base;
	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
						  &off);
	    gcc_assert (base);
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     off));
	    baseop = build_fold_addr_expr (base);
	  }
	return fold_build2 (MEM_REF, currop->type, baseop, offset);
      }

    case TARGET_MEM_REF:
      {
	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
	vn_reference_op_t nextop = &ref->operands[++*operand];
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	if (currop->op0)
	  {
	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
	    if (!genop0)
	      return NULL_TREE;
	  }
	if (nextop->op0)
	  {
	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
	    if (!genop1)
	      return NULL_TREE;
	  }
	return build5 (TARGET_MEM_REF, currop->type,
		       baseop, currop->op2, genop0, currop->op1, genop1);
      }

    case ADDR_EXPR:
      if (currop->op0)
	{
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
	if (!genop1)
	  return NULL_TREE;
	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree op1 = currop->op0;
	tree op2 = currop->op1;
	return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	tree genop2 = currop->op1;
	tree genop3 = currop->op2;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts);
	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, genop1, stmts);
	if (!genop1)
	  return NULL_TREE;
	if (genop2)
	  {
	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
	    /* Drop zero minimum index if redundant.  */
	    if (integer_zerop (genop2)
		&& (!domain_type
		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
	      genop2 = NULL_TREE;
	    else
	      {
		genop2 = find_or_generate_expression (block, genop2, stmts);
		if (!genop2)
		  return NULL_TREE;
	      }
	  }
	if (genop3)
	  {
	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
	    /* We can't always put a size in units of the element alignment
	       here as the element alignment may be not visible.  See
	       PR43783.  Simply drop the element size for constant
	       sizes.  */
	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
	      genop3 = NULL_TREE;
	    else
	      {
		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
		genop3 = find_or_generate_expression (block, genop3, stmts);
		if (!genop3)
		  return NULL_TREE;
	      }
	  }
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }

    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    genop2 = find_or_generate_expression (block, genop2, stmts);
	    if (!genop2)
	      return NULL_TREE;
	  }
	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
	genop = find_or_generate_expression (block, currop->op0, stmts);
	return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}

/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
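/* Illustrative sketch (hypothetical reference, not from any testcase):
   for a load of x.a[i].b the vn_reference operand array roughly holds
   COMPONENT_REF (b), ARRAY_REF (i), COMPONENT_REF (a), VAR_DECL (x),
   outermost first.  create_component_ref_by_pieces_1 recurses from the
   outermost piece towards the base, calls find_or_generate_expression
   only for scalar pieces such as the index i, and folds everything back
   into one access expression instead of a chain of temporaries.  */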
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
	return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs which eventually re-materialize
	 their operand values.  */
      if (temp->kind == NARY)
	return create_expression_by_pieces (block, temp, stmts,
					    get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form),
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gassign *newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	folded = create_component_ref_by_pieces (block, ref, stmts);
	if (!folded)
	  return NULL_TREE;
      }
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	tree *genop = XALLOCAVEC (tree, nary->length);
	unsigned i;
	for (i = 0; i < nary->length; ++i)
	  {
	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
	    if (!genop[i])
	      return NULL_TREE;
	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
	       may have conversions stripped.  */
	    if (nary->opcode == POINTER_PLUS_EXPR)
	      {
		if (i == 0)
		  genop[i] = gimple_convert (&forced_stmts,
					     nary->type, genop[i]);
		else if (i == 1)
		  genop[i] = gimple_convert (&forced_stmts,
					     sizetype, genop[i]);
	      }
	    else
	      genop[i] = gimple_convert (&forced_stmts,
					 TREE_TYPE (nary->op[i]), genop[i]);
	  }
	if (nary->opcode == CONSTRUCTOR)
	  {
	    vec<constructor_elt, va_gc> *elts = NULL;
	    for (i = 0; i < nary->length; ++i)
	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
	    folded = build_constructor (nary->type, elts);
	  }
	else
	  {
	    switch (nary->length)
	      {
	      case 1:
		folded = fold_build1 (nary->opcode, nary->type,
				      genop[0]);
		break;
	      case 2:
		folded = fold_build2 (nary->opcode, nary->type,
				      genop[0], genop[1]);
		break;
	      case 3:
		folded = fold_build3 (nary->opcode, nary->type,
				      genop[0], genop[1], genop[2]);
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
    folded = fold_convert (exprtype, folded);

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  gimple_seq tem = NULL;
  folded = force_gimple_operand (unshare_expr (folded), &tem,
				 false, NULL);
  gimple_seq_add_seq_without_update (&forced_stmts, tem);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  if (TREE_CODE (forcedname) == SSA_NAME)
	    {
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }

	  gimple_set_vuse (stmt, BB_LIVE_VOP_ON_EXIT (block));
	  gimple_set_modified (stmt, true);
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
  newstmt = gimple_build_assign (name, folded);
  gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
  gimple_set_modified (newstmt, true);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d (%04d)\n",
	       block->index, value_id);
    }

  return name;
}
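/* Illustrative sketch (hypothetical GIMPLE, not from any testcase):
   asked to generate the value of a + b in a predecessor block, this
   function appends

     pretmp_6 = a_1 + b_2;

   to the statement list, gives pretmp_6 the value number of the
   original expression and records it in NEW_SETS/AVAIL_OUT, so that a
   later iteration of insert or the final elimination phase can reuse
   it instead of recomputing the value.  */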
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gphi *phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && expr->kind != REFERENCE)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial "
		     "redundancy: Looks like an induction variable\n");
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
	{
	  builtexpr = create_expression_by_pieces (bprime, eprime,
						   &stmts, type);
	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
	  gsi_insert_seq_on_edge (pred, stmts);
	  if (!builtexpr)
	    {
	      /* We cannot insert a PHI node if we failed to insert
		 on one edge.  */
	      nophi = true;
	      continue;
	    }
	  avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
	  insertions = true;
	}
      else if (eprime->kind == CONSTANT)
	{
	  /* Constants may not have the right type, fold_convert
	     should give us back a constant with the right type.  */
	  tree constant = PRE_EXPR_CONSTANT (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
	    {
	      tree builtexpr = fold_convert (type, constant);
	      if (!is_gimple_min_invariant (builtexpr))
		{
		  tree forcedexpr = force_gimple_operand (builtexpr,
							  &stmts, true,
							  NULL);
		  if (!is_gimple_min_invariant (forcedexpr))
		    {
		      if (forcedexpr != builtexpr)
			{
			  VN_INFO_GET (forcedexpr)->valnum
			    = PRE_EXPR_CONSTANT (eprime);
			  VN_INFO (forcedexpr)->value_id
			    = get_expr_value_id (eprime);
			}
		      if (stmts)
			{
			  gimple_stmt_iterator gsi;
			  gsi = gsi_start (stmts);
			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
			    {
			      gimple stmt = gsi_stmt (gsi);
			      tree lhs = gimple_get_lhs (stmt);
			      if (TREE_CODE (lhs) == SSA_NAME)
				bitmap_set_bit (inserted_exprs,
						SSA_NAME_VERSION (lhs));
			      gimple_set_plf (stmt, NECESSARY, false);
			    }
			  gsi_insert_seq_on_edge (pred, stmts);
			}
		      avail[pred->dest_idx]
			= get_or_alloc_expr_for_name (forcedexpr);
		    }
		}
	      else
		avail[pred->dest_idx]
		  = get_or_alloc_expr_for_constant (builtexpr);
	    }
	}
      else if (eprime->kind == NAME)
	{
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
	  tree name = PRE_EXPR_NAME (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
	    {
	      tree builtexpr;
	      tree forcedexpr;
	      builtexpr = fold_convert (type, name);
	      forcedexpr = force_gimple_operand (builtexpr,
						 &stmts, true,
						 NULL);

	      if (forcedexpr != name)
		{
		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
		}

	      if (stmts)
		{
		  gimple_stmt_iterator gsi;
		  gsi = gsi_start (stmts);
		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
		    {
		      gimple stmt = gsi_stmt (gsi);
		      tree lhs = gimple_get_lhs (stmt);
		      if (TREE_CODE (lhs) == SSA_NAME)
			bitmap_set_bit (inserted_exprs,
					SSA_NAME_VERSION (lhs));
		      gimple_set_plf (stmt, NECESSARY, false);
		    }
		  gsi_insert_seq_on_edge (pred, stmts);
		}
	      avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
	    }
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  VN_INFO_GET (temp)->value_id = val;
  VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
		     pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
    }
  pre_stats.phis++;
  return true;
}
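/* Illustrative sketch (hypothetical GIMPLE and block numbers, not from
   any testcase): with x_3 = a_1 + b_2 available along only one
   predecessor of bb 5, the insertions above plus the PHI turn

     <bb 3>: x_3 = a_1 + b_2;        <bb 4>: (nothing)
     <bb 5>: y_5 = a_1 + b_2;

   into

     <bb 4>: pretmp_8 = a_1 + b_2;
     <bb 5>: prephitmp_9 = PHI <x_3(3), pretmp_8(4)>;

   after which y_5 = a_1 + b_2 is fully redundant with prephitmp_9 and
   can be removed by the elimination phase.  */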
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_partial_insertion.  */

static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  vec<pre_expr> avail = vNULL;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Found fully redundant value: ");
		  print_pre_expr (dump_file, expr);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime);
	      if (edoubleprime == NULL)
		{
		  avail[pred->dest_idx] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[pred->dest_idx] = edoubleprime;
		  by_some = true;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_d::equal (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some)
	    {
	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), no redundancy on to be "
			       "optimized for speed edge\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same)
	    {
	      gcc_assert (edoubleprime->kind == CONSTANT
			  || edoubleprime->kind == NAME);

	      tree temp = make_temp_ssa_name (get_expr_type (expr),
					      NULL, "pretmp");
	      gassign *assign
		= gimple_build_assign (temp,
				       edoubleprime->kind == CONSTANT ?
				       PRE_EXPR_CONSTANT (edoubleprime) :
				       PRE_EXPR_NAME (edoubleprime));
	      gimple_stmt_iterator gsi = gsi_after_labels (block);
	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

	      gimple_set_plf (assign, NECESSARY, false);
	      VN_INFO_GET (temp)->value_id = val;
	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
	      if (VN_INFO (temp)->valnum == NULL_TREE)
		VN_INFO (temp)->valnum = temp;
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
	      pre_expr newe = get_or_alloc_expr_for_name (temp);
	      add_to_value (val, newe);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
	      bitmap_insert_into_set (NEW_SETS (block), newe);
	    }
	}
    }

  avail.release ();
  exprs.release ();
  return new_stuff;
}
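/* Illustrative sketch for the all_same case above (hypothetical GIMPLE,
   not from any testcase): if the translated expression has the constant
   leader 5 along every predecessor edge, no PHI is needed; a single

     pretmp_7 = 5;

   at the start of BLOCK makes the value available there and lets the
   elimination phase remove the downstream computation.  */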
/* Perform insertion for partially anticipatable expressions.  There
   is only one case in which we will perform insertion for these.  This
   case is if the expression is partially anticipatable, and fully
   available.  In this case, we know that putting it earlier will enable
   us to remove the later computation.  */

static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
	      avail[pred->dest_idx] = edoubleprime;
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all)
	    {
	      edge succ;
	      bool do_insertion = false;

	      /* Insert only if we can remove a later expression on a path
		 that we want to optimize for speed.
		 The phi node that we will be inserting in BLOCK is not free,
		 and inserting it for the sake of !optimize_for_speed successor
		 may cause regressions on the speed path.  */
	      FOR_EACH_EDGE (succ, ei, block->succs)
		{
		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
		    {
		      if (optimize_edge_for_speed_p (succ))
			do_insertion = true;
		    }
		}

	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), not (partially) anticipated "
			       "on any to be optimized for speed edges\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  pre_stats.pa_insert++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	}
    }

  exprs.release ();
  return new_stuff;
}
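/* Illustrative sketch (hypothetical situation, not from any testcase):
   if both predecessors of BLOCK compute a_1 + b_2, so the value is fully
   available, but the expression is used again on only one of BLOCK's
   successor paths, so it is merely partially anticipatable in BLOCK,
   inserting the merge PHI here is still a win: the later computation on
   that successor path becomes fully redundant and is removed, at worst
   costing an unused PHI on the other path.  */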
static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS and
		 AVAIL_OUT.  In both cases the value may be represented
		 by some non-simple expression here that we want
		 to replace it with.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }
	  if (!single_pred_p (block))
	    {
	      new_stuff |= do_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_partial_partial_insertion (block, dom);
	    }
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    new_stuff |= insert_aux (son);

  return new_stuff;
}

/* Perform insertion of partially redundant values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB_FN (bb, cfun)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
      new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));

      /* Clear the NEW sets before the next iteration.  We have already
	 fully propagated its contents.  */
      if (new_stuff)
	FOR_ALL_BB_FN (bb, cfun)
	  bitmap_set_free (NEW_SETS (bb));
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */

static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
	  || !SSA_NAME_IS_DEFAULT_DEF (name)
	  || has_zero_uses (name)
	  || virtual_operand_p (name))
	continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    e);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			"tmp_gen", ENTRY_BLOCK);
      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			"avail_out", ENTRY_BLOCK);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
    = ssa_default_def (cfun, gimple_vop (cfun));

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
	  BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
	}

      /* Generate values for PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  tree result = gimple_phi_result (gsi.phi ());

	  /* We have no need for virtual phis, as they don't represent
	     actual computations.  */
	  if (virtual_operand_p (result))
	    {
	      BB_LIVE_VOP_ON_EXIT (block) = result;
	      continue;
	    }

	  pre_expr e = get_or_alloc_expr_for_name (result);
	  add_to_value (get_expr_value_id (e), e);
	  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	  bitmap_insert_into_set (PHI_GEN (block), e);
	}

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  tree op;

	  stmt = gsi_stmt (gsi);

	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
	    {
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbid hoisting possibly trapping expressions
		 before it.  */
	      int flags = gimple_call_flags (stmt);
	      if (!(flags & ECF_CONST)
		  || (flags & ECF_LOOPING_CONST_OR_PURE))
		BB_MAY_NOTRETURN (block) = 1;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (op);

	      add_to_value (get_expr_value_id (e), e);
	      bitmap_insert_into_set (TMP_GEN (block), e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	    }

	  if (gimple_vdef (stmt))
	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

	  if (gimple_has_side_effects (stmt)
	      || stmt_could_throw_p (stmt)
	      || is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      if (ssa_undefined_value_p (op))
		continue;
	      pre_expr e = get_or_alloc_expr_for_name (op);
	      bitmap_value_insert_into_set (EXP_GEN (block), e);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      continue;

	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		vn_reference_s ref1;
		pre_expr result = NULL;

		/* We can value number only calls to real functions.  */
		if (gimple_call_internal_p (stmt))
		  continue;

		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
		if (!ref)
		  continue;

		/* If the value of the call is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
		if (!gimple_vuse (stmt)
		    || gimple_code
			 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
		    || gimple_bb (SSA_NAME_DEF_STMT
				    (gimple_vuse (stmt))) != block)
		  {
		    result = (pre_expr) pool_alloc (pre_expr_pool);
		    result->kind = REFERENCE;
		    result->id = 0;
		    PRE_EXPR_REFERENCE (result) = ref;

		    get_or_alloc_expression_id (result);
		    add_to_value (get_expr_value_id (result), result);
		    bitmap_value_insert_into_set (EXP_GEN (block), result);
		  }
		continue;
	      }

	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (vn_get_stmt_kind (stmt))
		  {
		  case VN_NARY:
		    {
		      enum tree_code code = gimple_assign_rhs_code (stmt);
		      vn_nary_op_t nary;

		      /* COND_EXPR and VEC_COND_EXPR are awkward in
			 that they contain an embedded complex expression.
			 Don't even try to shove those through PRE.  */
		      if (code == COND_EXPR
			  || code == VEC_COND_EXPR)
			continue;

		      vn_nary_op_lookup_stmt (stmt, &nary);
		      if (!nary)
			continue;

		      /* If the NARY traps and there was a preceding
			 point in the block that might not return avoid
			 adding the nary to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_nary_may_trap (nary))
			continue;

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = NARY;
		      result->id = 0;
		      PRE_EXPR_NARY (result) = nary;
		      break;
		    }

		  case VN_REFERENCE:
		    {
		      vn_reference_t ref;
		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
					   gimple_vuse (stmt),
					   VN_WALK, &ref);
		      if (!ref)
			continue;

		      /* If the value of the reference is not invalidated in
			 this block until it is computed, add the expression
			 to EXP_GEN.  */
		      if (gimple_vuse (stmt))
			{
			  gimple def_stmt;
			  bool ok = true;
			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
			  while (!gimple_nop_p (def_stmt)
				 && gimple_code (def_stmt) != GIMPLE_PHI
				 && gimple_bb (def_stmt) == block)
			    {
			      if (stmt_may_clobber_ref_p
				    (def_stmt, gimple_assign_rhs1 (stmt)))
				{
				  ok = false;
				  break;
				}
			      def_stmt
				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
			    }
			  if (!ok)
			    continue;
			}

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = REFERENCE;
		      result->id = 0;
		      PRE_EXPR_REFERENCE (result) = ref;
		      break;
		    }

		  default:
		    continue;
		  }

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		bitmap_value_insert_into_set (EXP_GEN (block), result);
		continue;
	      }
	    default:
	      break;
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_bitmap_set (dump_file, EXP_GEN (block),
			    "exp_gen", block->index);
	  print_bitmap_set (dump_file, PHI_GEN (block),
			    "phi_gen", block->index);
	  print_bitmap_set (dump_file, TMP_GEN (block),
			    "tmp_gen", block->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (block),
			    "avail_out", block->index);
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
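/* Illustrative sketch (hypothetical dominator tree, not from any
   testcase): if BB2 is immediately dominated by BB1 and computes
   t_5 = a_1 + b_2, the walk above gives
   AVAIL_OUT (BB2) = AVAIL_OUT (BB1) U {t_5}: the dominator's set is
   copied and each definition is added as it is value numbered.  Because
   SSA values are never killed, this single dominator-order pass
   suffices and no fixpoint iteration is needed for AVAIL.  */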
/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
	return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (el_avail[SSA_NAME_VERSION (valnum)])
	pushop = el_avail[SSA_NAME_VERSION (valnum)];
      el_avail_stack.safe_push (pushop);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
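/* Illustrative sketch (hypothetical SSA names, not from any testcase):
   if value number V currently has leader x_3 and a dominated block
   pushes a new leader y_7 for V, eliminate_push_avail saves x_3 on
   el_avail_stack before overwriting the slot; after_dom_children later
   pops the stack down to its NULL_TREE block marker and restores x_3,
   so a leader is only visible within the dominator subtree that
   established it.  */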
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gassign *tem
    = gimple_build_assign (res,
			   fold_build1 (TREE_CODE (expr),
					TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction direction, bool do_pre_)
      : dom_walker (direction), do_pre (do_pre_) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  bool do_pre;
};
/* Perform elimination for the basic-block B during the domwalk.  */

void
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  el_avail_stack.safe_push (NULL_TREE);

  /* ???  If we do nothing for unreachable blocks then this will confuse
     tailmerging.  Eventually we can reduce its reliance on SCCVN now
     that we fully copy/constant-propagate (most) things.  */

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (inserted_exprs
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    pre_stats.phis--;
	  else
	    pre_stats.eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      el_to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (inserted_exprs
	      && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
	      && TREE_CODE (sprime) == SSA_NAME)
	    gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple stmt = gimple_build_assign (res, sprime);
	  /* ???  It cannot yet be necessary (DOM walk).  */
	  gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));

	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
	  && !gimple_has_volatile_ops (stmt)
	  /* See PR43491.  Do not replace a global register variable when
	     it is the RHS of an assignment.  Do replace local register
	     variables since gcc does not guarantee a local variable will
	     be allocated in register.
	     ???  The fix isn't effective here.  This should instead
	     be ensured by not value-numbering them the same but treating
	     them like volatiles?  */
	  && !(gimple_assign_single_p (stmt)
	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
		   && is_global_var (gimple_assign_rhs1 (stmt)))))
	{
	  sprime = eliminate_avail (lhs);
	  if (!sprime)
	    {
	      /* If there is no existing usable leader but SCCVN thinks
		 it has an expression it wants to use as replacement,
		 insert that.  */
	      tree val = VN_INFO (lhs)->valnum;
	      if (val != VN_TOP
		  && TREE_CODE (val) == SSA_NAME
		  && VN_INFO (val)->needs_insertion
		  && VN_INFO (val)->expr != NULL_TREE
		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
		eliminate_push_avail (sprime);
	    }

	  /* If this now constitutes a copy duplicate points-to
	     and range info appropriately.  This is especially
	     important for inserted code.  See tree-ssa-copy.c
	     for similar code.  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME)
	    {
	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && SSA_NAME_PTR_INFO (lhs)
		  && !SSA_NAME_PTR_INFO (sprime))
		{
		  duplicate_ssa_name_ptr_info (sprime,
					       SSA_NAME_PTR_INFO (lhs));
		  if (b != sprime_b)
		    mark_ptr_info_alignment_unknown
			(SSA_NAME_PTR_INFO (sprime));
		}
	      else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
		       && SSA_NAME_RANGE_INFO (lhs)
		       && !SSA_NAME_RANGE_INFO (sprime)
		       && b == sprime_b)
		duplicate_ssa_name_range_info (sprime,
					       SSA_NAME_RANGE_TYPE (lhs),
					       SSA_NAME_RANGE_INFO (lhs));
	    }

	  /* Inhibit the use of an inserted PHI on a loop header when
	     the address of the memory reference is a simple induction
	     variable.  In other cases the vectorizer won't do anything
	     anyway (either it's loop invariant or a complicated
	     expression).  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME
	      && do_pre
	      && flag_tree_loop_vectorize
	      && loop_outer (b->loop_father)
	      && has_zero_uses (sprime)
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	      && gimple_assign_load_p (stmt))
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
	      basic_block def_bb = gimple_bb (def_stmt);
	      if (gimple_code (def_stmt) == GIMPLE_PHI
		  && b->loop_father->header == def_bb)
		{
		  ssa_op_iter iter;
		  tree op;
		  bool found = false;
		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		    {
		      affine_iv iv;
		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		      if (def_bb
			  && flow_bb_inside_loop_p (b->loop_father, def_bb)
			  && simple_iv (b->loop_father,
					b->loop_father, op, &iv, true))
			{
			  found = true;
			  break;
			}
		    }
		  if (found)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Not replacing ");
			  print_gimple_expr (dump_file, stmt, 0, 0);
			  fprintf (dump_file, " with ");
			  print_generic_expr (dump_file, sprime, 0);
			  fprintf (dump_file, " which would add a loop"
				   " carried dependence to loop %d\n",
				   b->loop_father->num);
			}
		      /* Don't keep sprime available.  */
		      sprime = NULL_TREE;
		    }
		}
	    }

	  if (sprime)
	    {
	      /* If we can propagate the value computed for LHS into
		 all uses don't bother doing anything with this stmt.  */
	      if (may_propagate_copy (lhs, sprime))
		{
		  /* Mark it for removal.  */
		  el_to_remove.safe_push (stmt);

		  /* ???  Don't count copy/constant propagations.  */
		  if (gimple_assign_single_p (stmt)
		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
			  || gimple_assign_rhs1 (stmt) == sprime))
		    continue;

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime, 0);
		      fprintf (dump_file, " in all uses of ");
		      print_gimple_stmt (dump_file, stmt, 0, 0);
		    }

		  pre_stats.eliminations++;
		  continue;
		}

	      /* If this is an assignment from our leader (which
		 happens in the case the value-number is a constant)
		 then there is nothing to do.  */
	      if (gimple_assign_single_p (stmt)
		  && sprime == gimple_assign_rhs1 (stmt))
		continue;

	      /* Else replace its RHS.  */
	      bool can_make_abnormal_goto
		  = is_gimple_call (stmt)
		    && stmt_can_make_abnormal_goto (stmt);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime, 0);
		  fprintf (dump_file, " in ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		}

	      if (TREE_CODE (sprime) == SSA_NAME)
		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
				NECESSARY, true);

	      pre_stats.eliminations++;
	      gimple orig_stmt = stmt;
	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
					      TREE_TYPE (sprime)))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      tree vdef = gimple_vdef (stmt);
	      tree vuse = gimple_vuse (stmt);
	      propagate_tree_value_into_stmt (&gsi, sprime);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	      if (vdef != gimple_vdef (stmt))
		VN_INFO (vdef)->valnum = vuse;

	      /* If we removed EH side-effects from the statement, clean
		 its EH information.  */
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		{
		  bitmap_set_bit (need_eh_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed EH side-effects.\n");
		}

	      /* Likewise for AB side-effects.  */
	      if (can_make_abnormal_goto
		  && !stmt_can_make_abnormal_goto (stmt))
		{
		  bitmap_set_bit (need_ab_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed AB side-effects.\n");
		}

	      continue;
	    }
	}

      /* If the statement is a scalar store, see if the expression
	 has the same value number as its rhs.  If so, the store is
	 dead.  */
      if (gimple_assign_single_p (stmt)
	  && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg (gimple_assign_lhs (stmt))
	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	{
	  tree val;
	  tree rhs = gimple_assign_rhs1 (stmt);
	  val = vn_reference_lookup (gimple_assign_lhs (stmt),
				     gimple_vuse (stmt), VN_WALK, NULL);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = VN_INFO (rhs)->valnum;
	  if (val
	      && operand_equal_p (val, rhs, 0))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Deleted redundant store ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		}

	      /* Queue stmt for removal.  */
	      el_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
	 into all uses), replace all uses on this stmt with their
	 leaders.  */
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  /* ???  The call code above leaves stmt operands un-updated.  */
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  tree sprime = eliminate_avail (use);
	  if (sprime && sprime != use
	      && may_propagate_copy (use, sprime)
	      /* We substitute into debug stmts to avoid excessive
		 debug temporaries created by removed stmts, but we need
		 to avoid doing so for inserted sprimes as we never want
		 to create debug temporaries for them.  */
	      && (!inserted_exprs
		  || TREE_CODE (sprime) != SSA_NAME
		  || !is_gimple_debug (stmt)
		  || !bitmap_bit_p (inserted_exprs,
				    SSA_NAME_VERSION (sprime))))
	    {
	      propagate_value (use_p, sprime);
	      gimple_set_modified (stmt, true);
	      if (TREE_CODE (sprime) == SSA_NAME
		  && !is_gimple_debug (stmt))
		gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
				NECESSARY, true);
	    }
	}

      /* Visit indirect calls and turn them into direct calls if
	 possible using the devirtualization machinery.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  tree fn = gimple_call_fn (call_stmt);
	  if (fn
	      && flag_devirtualize
	      && virtual_method_call_p (fn))
	    {
	      tree otr_type = obj_type_ref_class (fn);
	      tree instance;
	      ipa_polymorphic_call_context context (current_function_decl,
						    fn, stmt, &instance);
	      bool final;

	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
					otr_type, stmt);

	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
						     tree_to_uhwi
						       (OBJ_TYPE_REF_TOKEN (fn)),
						     context,
						     &final);
	      if (dump_enabled_p ())
		dump_possible_polymorphic_call_targets (dump_file,
							obj_type_ref_class (fn),
							tree_to_uhwi
							  (OBJ_TYPE_REF_TOKEN (fn)),
							context);
	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
		{
		  tree fn;
		  if (targets.length () == 1)
		    fn = targets[0]->decl;
		  else
		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  if (dump_enabled_p ())
		    {
		      location_t loc = gimple_location_safe (stmt);
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				       "converting indirect call to "
				       "function %s\n",
				       cgraph_node::get (fn)->name ());
		    }
		  gimple_call_set_fndecl (call_stmt, fn);
		  gimple_set_modified (stmt, true);
		}
	    }
	}

      if (gimple_modified_p (stmt))
	{
	  /* If a formerly non-invariant ADDR_EXPR is turned into an
	     invariant one it was on a separate stmt.  */
	  if (gimple_assign_single_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
	  gimple old_stmt = stmt;
	  if (is_gimple_call (stmt))
	    {
	      /* ???  Only fold calls inplace for now, this may create new
		 SSA names which in turn will confuse free_scc_vn SSA name
		 caching.  */
	      fold_stmt_inplace (&gsi);
	      /* When changing a call into a noreturn call, cfg cleanup
		 is needed to fix up the noreturn call.  */
	      if (!was_noreturn && gimple_call_noreturn_p (stmt))
		el_todo |= TODO_cleanup_cfg;
	    }
	  else
	    {
	      fold_stmt (&gsi);
	      stmt = gsi_stmt (gsi);
	      if ((gimple_code (stmt) == GIMPLE_COND
		   && (gimple_cond_true_p (as_a <gcond *> (stmt))
		       || gimple_cond_false_p (as_a <gcond *> (stmt))))
		  || (gimple_code (stmt) == GIMPLE_SWITCH
		      && TREE_CODE (gimple_switch_index (
				      as_a <gswitch *> (stmt)))
			 == INTEGER_CST))
		el_todo |= TODO_cleanup_cfg;
	    }
	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }
	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }
	  update_stmt (stmt);
	  if (vdef != gimple_vdef (stmt))
	    VN_INFO (vdef)->valnum = vuse;
	}

      /* Make new values available - for fully redundant LHS we
	 continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    {
	      propagate_value (use_p, sprime);
	      if (TREE_CODE (sprime) == SSA_NAME)
		gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
	    }
	}
    }
}
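/* Illustrative sketch for the scalar store check above (hypothetical
   GIMPLE, not from any testcase): in

     *p_1 = x_2;
     ...              (nothing clobbers *p_1 in between)
     *p_1 = x_2;

   the second store's RHS value number equals the value the reference
   lookup finds for *p_1 at that point, so the store changes nothing in
   memory and is queued on el_to_remove as dead.  */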
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
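
/* The unwinding above relies on the stack discipline of the walk:
   before_dom_children pushes a NULL_TREE marker when it enters a block,
   and eliminate_push_avail pushes either the new leader itself (when its
   value had no leader yet) or the leader it displaced.  Popping entries
   until the marker therefore restores exactly the set of leaders that was
   visible in the immediate dominator.  */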
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
			do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
	lhs = gimple_phi_result (stmt);
      else
	lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
	  && TREE_CODE (lhs) == SSA_NAME)
	bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();

  return el_todo;
}
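
/* A sketch of what elimination does (invented names): given

     _5 = a_1 + b_2;
     ...
     _9 = a_1 + b_2;
     c_3 = _9;

   value numbering gives _5 and _9 the same value number, so the dominator
   walk replaces the use of _9 with _5 and queues the now-dead definition
   of _9 in el_to_remove.  */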
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
	bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	    }
	}
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  BITMAP_FREE (worklist);
}
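
/* For instance (invented names): if insert () materialized
   pretmp_8 = x_1 + y_2 in a predecessor block but every intended use was
   later satisfied by a different leader, the definition of pretmp_8 is
   never marked NECESSARY and the loop above deletes it.  */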
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
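
/* In a nutshell (a sketch with invented names), PRE transforms

     if (cond)
       x = a + b;
     y = a + b;

   where a + b is partially redundant (recomputed only on the path that
   skips the then-block) into

     if (cond)
       x = a + b;
     else
       tmp = a + b;
     y = PHI <x, tmp>;

   so the second evaluation becomes fully redundant and eliminate ()
   can remove it.  */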
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Eliminate folds statements, which might (though it should not)
     end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}
} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
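
/* FRE reuses the elimination machinery of PRE but calls eliminate (false):
   nothing is hoisted or inserted, so only computations that are fully
   redundant - e.g. a second occurrence of a_1 + b_2 dominated by the
   first - are replaced by their existing leaders.  */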
unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}
} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}