/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dce.h"
#include "tree-cfgcleanup.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
        This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
        This adds computation of AVAIL_OUT and ANTIC_IN and does
        expression insertion to form GVN-PRE.

     3. Code hoisting
        This optimization uses the ANTIC_IN sets computed for PRE
        to move expressions further up than PRE would do, to make
        multiple computations of the same value fully redundant.
        This pass is explained below (after the explanation of the
        basic algorithm for PRE).

   TODO:

     1. Avail sets can be shared by making an avail_find_leader that
        walks up the dominator tree and looks in those avail sets.
        This might affect code optimality, it's unclear right now.
        Currently the AVAIL_OUT sets are the remaining quadraticness in
        memory of GVN-PRE.
     2. Strength reduction can be performed by anticipating expressions
        we can repair later on.
     3. We can do back-substitution or smarter value numbering to catch
        commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
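
/* As an illustrative sketch (hypothetical code, not taken from the
   pass itself), the canonical partially redundant expression and the
   insertion PRE performs look like:

     int
     example_pre (int a, int b, int cond)
     {
       int x, y;
       if (cond)
         x = a + b;     <- a + b is AVAIL only on this path
       else
         x = 0;         <- PRE inserts t = a + b here
       y = a + b;       <- ANTIC in both predecessors; after the
                           insertion it is fully redundant and
                           elimination replaces it with a PHI of
                           the two available values
       return x + y;
     }
*/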
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions).  A sketch of the diamond
   case follows after this comment.

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */
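
/* A small sketch of the diamond shape the conditions above describe
   (hypothetical code, not from the pass):

     int
     example_hoist (int a, int b, int cond)
     {
       int x;
       if (cond)        <- block B: a + b is in ANTIC_IN (B), not in
         x = a + b;        AVAIL_OUT (B), B dominates both successors,
       else                there are two successors, and the value is
         x = a + b + 1;    in the AVAIL_OUT of each of them
       return x;
     }

   Hoisting inserts a single computation of a + b at the end of B and
   both arms reuse it, shrinking the code.  */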
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
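
/* For instance (made-up ids, for illustration only): a set holding
   expressions e1 and e2 that share value-id 5, plus e3 with
   value-id 7, is stored as

     values:      { 5, 7 }
     expressions: { id (e1), id (e2), id (e3) }

   so testing whether a value is in the set is a single bit test,
   independent of how many expressions compute that value.  */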
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    CONSTANT,
    NAME,
    NARY,
    REFERENCE
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  location_t loc;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

static inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

static inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the recorded expression id for EXPR, or 0 if none exists.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  result->loc = UNKNOWN_LOCATION;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef class bitmap_set
{
public:
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
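
/* A minimal usage sketch for the iteration macros above, mirroring the
   loops used throughout this file (SET here is a hypothetical
   bitmap_set_t variable):

     unsigned int i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ... use expr ...
       }
*/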
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}
inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      gcc_assert (!PRE_EXPR_NARY (expr)->predicated_values);
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a VN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
vn_valnum_from_value_id (unsigned int val)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
	 for the same value to appear in a set.  This is needed for
	 TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}
/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned to_remove = -1U;
  bitmap_and_compl_into (&a->values, &b->values);
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&a->expressions, to_remove);
	  to_remove = -1U;
	}
      pre_expr expr = expression_for_id (i);
      if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
	to_remove = i;
    }
  if (to_remove != -1U)
    bitmap_clear_bit (&a->expressions, to_remove);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      return;
	    }
	}
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	unsigned int i;
	vn_reference_op_t vro;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  newexpr->loc = UNKNOWN_LOCATION;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, alias_set_type base_set,
			      tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  if (same_valid)
    *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  unsigned int cnt = param_sccvn_max_alias_queries_per_access;
  use_oracle = ao_ref_init_from_vn_reference (&ref, set, base_set,
					      type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (cnt > 0
	   && !stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	--cnt;
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle && same_valid)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, true,
					   cnt, &visited, false, NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      /* If we didn't find any, the value ID can't stay the same.  */
      if (!vuse && same_valid)
	*same_valid = false;
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }
  else
    return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
		     bitmap_set_t set3 = NULL)
{
  pre_expr result = NULL;

  if (set1)
    result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}
/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    default:
      gcc_unreachable ();
    }
}
/* Get a representative SSA_NAME for a given expression that is available in B.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e, basic_block b = NULL)
{
  tree name, valnum = NULL_TREE;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      {
		tree name = PRE_EXPR_NAME (rep);
		valnum = VN_INFO (name)->valnum;
		gimple *def = SSA_NAME_DEF_STMT (name);
		/* We have to return either a new representative or one
		   that can be used for expression simplification and thus
		   is available in B.  */
		if (! b
		    || gimple_nop_p (def)
		    || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
		  return name;
	      }
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO (name)->value_id = value_id;
  VN_INFO (name)->valnum = valnum ? valnum : name;
  /* ??? For now mark this SSA name for release by VN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (bitmap_set_t, pre_expr, bitmap_set_t, bitmap_set_t, edge);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
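
/* For example (an illustrative sketch with hypothetical SSA names):
   given x_3 = PHI <x_1 (pred1), x_2 (pred2)> in PHIBLOCK, translating
   the NARY expression x_3 + 1 through the edge from pred1 replaces the
   leader for x_3's value by the PHI argument x_1 and yields x_1 + 1,
   the expression whose availability in pred1 the ANTIC computation
   actually needs to reason about.  */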
static pre_expr
phi_translate_1 (bitmap_set_t dest,
		 pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  basic_block pred = e->src;
  basic_block phiblock = e->dest;
  location_t expr_loc = expr->loc;
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (dest, leader, set1, set2, e);
		if (result && result != leader)
		  /* If op has a leader in the sets we translate make
		     sure to use the value of the translated expression.
		     We might need a new representative for that.  */
		  newnary->op[i] = get_representative_for (result, pred);
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       to be inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (e->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (constant);
			/* We want a leader in ANTIC_OUT or AVAIL_OUT here.
			   dest has what we computed into ANTIC_OUT so far
			   so pick from that - since topological sorting
			   by sorted_array_from_bitmap_set isn't perfect
			   we may lose some cases here.  */
			constant = find_leader_in_sets (value_id, dest,
							AVAIL_OUT (pred));
			if (constant)
			  {
			    if (dump_file && (dump_flags & TDF_DETAILS))
			      {
				fprintf (dump_file, "simplifying ");
				print_pre_expr (dump_file, expr);
				fprintf (dump_file, " translated %d -> %d to ",
					 phiblock->index, pred->index);
				PRE_EXPR_NARY (expr) = newnary;
				print_pre_expr (dump_file, expr);
				PRE_EXPR_NARY (expr) = nary;
				fprintf (dump_file, " to ");
				print_pre_expr (dump_file, constant);
				fprintf (dump_file, "\n");
			      }
			    return constant;
			  }
		      }
		  }
		else
		  return constant;
	      }

	    /* vn_nary_* do not valueize operands.  */
	    for (i = 0; i < newnary->length; ++i)
	      if (TREE_CODE (newnary->op[i]) == SSA_NAME)
		newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    expr->loc = expr_loc;
	    if (nary && !nary->predicated_values)
	      {
		PRE_EXPR_NARY (expr) = nary;
		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (TREE_CODE (op[n]) != INTEGER_CST
			&& !is_gimple_min_invariant (op[n]))
		      return NULL;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (dest, leader, set1, set2, e);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  {
		    newoperands.release ();
		    return NULL;
		  }
	      }
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->base_set,
						    ref->type,
						    vuse, phiblock, pred,
						    changed
						    ? NULL : &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->base_set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;
	    expr->loc = expr_loc;

	    if (newref)
	      new_val_id = newref->value_id;
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->base_set, ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
	      }
	    PRE_EXPR_REFERENCE (expr) = newref;
	    get_or_alloc_expression_id (expr);
	    add_to_value (new_val_id, expr);
	  }
	else
	  newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (bitmap_set_t dest, pre_expr expr,
	       bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, e->src))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  basic_block saved_valueize_bb = vn_context_bb;
  vn_context_bb = e->src;
  phitrans = phi_translate_1 (dest, expr, set1, set2, e);
  vn_context_bb = saved_valueize_bb;

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (e->dest)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (dest, expr, set, NULL, e);
      if (!translated)
	continue;

      bitmap_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
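
  /* The EXPR_DIES cache packs two bits per expression id: bit ID * 2
     records that the result for this block has been computed (used by
     the lookup above), and bit ID * 2 + 1 then holds whether the
     expression dies (stored at the end of this function).  */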
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->base_set,
					     refx->type, refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
   This means expressions that are made up of values we have no leaders for
   in SET1 or SET2.  */

static void
clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	{
	  unsigned int val = get_expr_value_id (expr);
	  bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
	  /* We are entered with possibly multiple expressions for a value
	     so before removing a value from the set see if there's an
	     expression for it left.  */
	  if (! bitmap_find_leader (set1, val))
	    bitmap_clear_bit (&set1->values, val);
	}
    }
  exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;
  unsigned to_remove = -1U;
  bool any_removed = false;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      /* Remove queued expr.  */
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&set->expressions, to_remove);
	  any_removed = true;
	  to_remove = -1U;
	}
      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  if (ref->vuse)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	      if (!gimple_nop_p (def_stmt)
		  && ((gimple_bb (def_stmt) != block
		       && !dominated_by_p (CDI_DOMINATORS,
					   block, gimple_bb (def_stmt)))
		      || (gimple_bb (def_stmt) == block
			  && value_dies_in_block_x (expr, block))))
		to_remove = i;
	    }
	}
      else if (expr->kind == NARY)
	{
	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	  /* If the NARY may trap make sure the block does not contain
	     a possible exit point.
	     ??? This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_nary_may_trap (nary))
	    to_remove = i;
	}
    }

  /* Remove queued expr.  */
  if (to_remove != -1U)
    {
      bitmap_clear_bit (&set->expressions, to_remove);
      any_removed = true;
    }

  /* Above we only removed expressions, now clean the set of values
     which no longer have any corresponding expression.  We cannot
     clear the value at the time we remove an expression since there
     may be multiple expressions per value.
     If we'd queue possibly to be removed values we could use
     the bitmap_find_leader way to see if there's still an expression
     for it.  For some ratio of to be removed values and number of
     values/expressions in the set this might be faster than rebuilding
     the value-set.  */
  if (any_removed)
    {
      bitmap_clear (&set->values);
      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  bitmap_set_bit (&set->values, value_id);
	}
    }
}
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Note that clean() is deferred until after the iteration.  */
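
/* As a small worked example (hypothetical values): if both successors
   of BLOCK have a + b in their ANTIC_IN, the value survives the
   intersection into ANTIC_OUT; if BLOCK itself computes t = a + b,
   then a + b is in EXP_GEN (BLOCK) and t is in TMP_GEN (BLOCK), so
   ANTIC_IN (BLOCK) keeps the expression a + b while the temporary t
   is removed by the TMP_GEN subtraction.  */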
2037 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2039 bitmap_set_t S
, old
, ANTIC_OUT
;
2043 bool was_visited
= BB_VISITED (block
);
2044 bool changed
= ! BB_VISITED (block
);
2045 BB_VISITED (block
) = 1;
2046 old
= ANTIC_OUT
= S
= NULL
;
2048 /* If any edges from predecessors are abnormal, antic_in is empty,
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      e = single_succ_edge (block);
      gcc_assert (BB_VISITED (e->dest));
      phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e);
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      size_t i;
      edge first = NULL;

      auto_vec<edge> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (!first
              && BB_VISITED (e->dest))
            first = e;
          else if (BB_VISITED (e->dest))
            worklist.quick_push (e);
          else
            {
              /* Unvisited successors get their ANTIC_IN replaced by the
                 maximal set to arrive at a maximum ANTIC_IN solution.
                 We can ignore them in the intersection operation and thus
                 need not explicitly represent that maximum solution.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
                         e->src->index, e->dest->index);
            }
        }

      /* Of multiple successors we have to have visited one already
         which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first);

      /* If we have multiple successors we need to intersect the ANTIC_OUT
         sets.  For values that's a simple intersection but for
         expressions it is a union.  Given we want to have a single
         expression per value in our sets we have to canonicalize.
         Avoid randomness and running into cycles like for PR82129 and
         canonicalize the expression we choose to the one with the
         lowest id.  This requires we actually compute the union first.  */
      FOR_EACH_VEC_ELT (worklist, i, e)
        {
          if (!gimple_seq_empty_p (phi_nodes (e->dest)))
            {
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (e->dest), e);
              bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
              bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
              bitmap_set_free (tmp);
            }
          else
            {
              bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
              bitmap_ior_into (&ANTIC_OUT->expressions,
                               &ANTIC_IN (e->dest)->expressions);
            }
        }
      if (! worklist.is_empty ())
        {
          /* Prune expressions not in the value set.  */
          unsigned int i;
          bitmap_iterator bi;
          unsigned int to_clear = -1U;
          FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
            {
              if (to_clear != -1U)
                {
                  bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
                  to_clear = -1U;
                }
              pre_expr expr = expression_for_id (i);
              unsigned int value_id = get_expr_value_id (expr);
              if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
                to_clear = i;
            }
          if (to_clear != -1U)
            bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
        }
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
                                                      TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
  bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);

  /* clean (ANTIC_IN (block)) is deferred to after the iteration converged
     because it can cause non-convergence, see for example PR81181.  */

  /* Intersect ANTIC_IN with the old ANTIC_IN.  This is required until
     we properly represent the maximum expression set, thus not prune
     values without expressions during the iteration.  */
  if (was_visited
      && bitmap_and_into (&ANTIC_IN (block)->values, &old->values))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "warning: intersecting with old ANTIC_IN "
                 "shrinks the set\n");
      /* Prune expressions not in the value set.  */
      unsigned int i;
      bitmap_iterator bi;
      unsigned int to_clear = -1U;
      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (block), i, bi)
        {
          if (to_clear != -1U)
            {
              bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
              to_clear = -1U;
            }
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&ANTIC_IN (block)->values, value_id))
            to_clear = i;
        }
      if (to_clear != -1U)
        bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
    }

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    changed = true;

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (ANTIC_OUT)
        print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

      if (changed)
        fprintf (dump_file, "[changed] ");
      print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
                        block->index);

      if (S)
        print_bitmap_set (dump_file, S, "S", block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
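
/* A small worked example of the computation above (purely illustrative;
   the names and set contents are hypothetical, not taken from a real
   dump).  For a block B with

     EXP_GEN (B) = { a_1 + b_2 }   (a_1 + b_2 is computed in B)
     TMP_GEN (B) = { t_3 }         (t_3 is defined in B)
     ANTIC_OUT   = { t_3 * 2 }     (anticipated by B's successors)

   the equations give

     ANTIC_IN (B) = (EXP_GEN (B) - TMP_GEN (B)) U (ANTIC_OUT - TMP_GEN (B))
                  = { a_1 + b_2, t_3 * 2 }

   and the deferred clean () later removes t_3 * 2, because its operand
   t_3 has no leader on entry to B; only a_1 + b_2 is truly
   anticipatable at the start of B.  */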
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
*/
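
/* An illustrative (hypothetical) example for the equations above: in

     B:  if (cond_5)
           goto then_bb;   where then_bb computes x_1 = a_2 + b_3
         else
           goto else_bb;   where a_2 + b_3 is not computed

   a_2 + b_3 is computed on only one path out of B, so it is not in
   ANTIC_OUT (B), which intersects over the successors; the value-wise
   union above does put it into PA_OUT (B).  It is partially, though
   not fully, anticipatable at the end of B.  */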
static void
compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
{
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = param_max_partial_antic_length;

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      e = single_succ_edge (block);
      if (!(e->flags & EDGE_DFS_BACK))
        phi_translate_set (PA_OUT, PA_IN (e->dest), e);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      size_t i;

      auto_vec<edge> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (e->flags & EDGE_DFS_BACK)
            continue;
          worklist.quick_push (e);
        }
      if (worklist.length () > 0)
        {
          FOR_EACH_VEC_ELT (worklist, i, e)
            {
              unsigned int i;
              bitmap_iterator bi;

              FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
                bitmap_value_insert_into_set (PA_OUT,
                                              expression_for_id (i));
              if (!gimple_seq_empty_p (phi_nodes (e->dest)))
                {
                  bitmap_set_t pa_in = bitmap_set_new ();
                  phi_translate_set (pa_in, PA_IN (e->dest), e);
                  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
                    bitmap_value_insert_into_set (PA_OUT,
                                                  expression_for_id (i));
                  bitmap_set_free (pa_in);
                }
              else
                FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
                  bitmap_value_insert_into_set (PA_OUT,
                                                expression_for_id (i));
            }
        }
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  clean (PA_IN (block), ANTIC_IN (block));

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
        print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;
  edge_iterator ei;
  edge e;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  auto_sbitmap has_abnormal_preds (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      BB_VISITED (block) = 0;

      FOR_EACH_EDGE (e, ei, block->preds)
        if (e->flags & EDGE_ABNORMAL)
          {
            bitmap_set_bit (has_abnormal_preds, block->index);
            break;
          }

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      if (do_partial_partial)
        PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  /* For ANTIC computation we need a postorder that also guarantees that
     a block with a single successor is visited after its successor.
     RPO on the inverted CFG has this property.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);

  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (worklist);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_set_bit (worklist, e->src->index);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
         ANTIC sets shrink and we restrict valid translations to
         those having operands with leaders in ANTIC.  Same below
         for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder.length () - 1; i >= 0; i--)
        {
          if (bitmap_bit_p (worklist, postorder[i]))
            {
              basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
              bitmap_clear_bit (worklist, block->index);
              if (compute_antic_aux (block,
                                     bitmap_bit_p (has_abnormal_preds,
                                                   block->index)))
                {
                  FOR_EACH_EDGE (e, ei, block->preds)
                    bitmap_set_bit (worklist, e->src->index);
                  changed = true;
                }
            }
        }
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  /* We have to clean after the dataflow problem converged as cleaning
     can cause non-convergence because it is based on expressions
     rather than values.  */
  FOR_EACH_BB_FN (block, cfun)
    clean (ANTIC_IN (block));

  statistics_histogram_event (cfun, "compute_antic iterations",
                              num_iterations);

  if (do_partial_partial)
    {
      /* For partial antic we ignore backedges and thus we do not need
         to perform any iteration when we process blocks in postorder.  */
      for (i = postorder.length () - 1; i >= 0; i--)
        {
          basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
          compute_partial_antic_aux (block,
                                     bitmap_bit_p (has_abnormal_preds,
                                                   block->index));
        }
    }
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;

/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
                                  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &ref->operands[*operand];
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      gcc_unreachable ();

    case MEM_REF:
      {
        tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (!baseop)
          return NULL_TREE;
        tree offset = currop->op0;
        if (TREE_CODE (baseop) == ADDR_EXPR
            && handled_component_p (TREE_OPERAND (baseop, 0)))
          {
            poly_int64 off;
            tree base;
            base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
                                                  &off);
            gcc_assert (base);
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     off));
            baseop = build_fold_addr_expr (base);
          }
        genop = build2 (MEM_REF, currop->type, baseop, offset);
        MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
        MR_DEPENDENCE_BASE (genop) = currop->base;
        REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
        return genop;
      }

    case TARGET_MEM_REF:
      {
        tree genop0 = NULL_TREE, genop1 = NULL_TREE;
        vn_reference_op_t nextop = &ref->operands[(*operand)++];
        tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (!baseop)
          return NULL_TREE;
        if (currop->op0)
          {
            genop0 = find_or_generate_expression (block, currop->op0, stmts);
            if (!genop0)
              return NULL_TREE;
          }
        if (nextop->op0)
          {
            genop1 = find_or_generate_expression (block, nextop->op0, stmts);
            if (!genop1)
              return NULL_TREE;
          }
        genop = build5 (TARGET_MEM_REF, currop->type,
                        baseop, currop->op2, genop0, currop->op1, genop1);

        MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
        MR_DEPENDENCE_BASE (genop) = currop->base;
        return genop;
      }

    case ADDR_EXPR:
      if (currop->op0)
        {
          gcc_assert (is_gimple_min_invariant (currop->op0));
          return currop->op0;
        }
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (!genop0)
          return NULL_TREE;
        return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (!genop0)
          return NULL_TREE;
        tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
        if (!genop1)
          return NULL_TREE;
        return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
        tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                        stmts);
        if (!genop0)
          return NULL_TREE;
        tree op1 = currop->op0;
        tree op2 = currop->op1;
        tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
        REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
        return fold (t);
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
         is op0 of the reference op and operand 3 of the array ref is
         op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
        tree genop0;
        tree genop1 = currop->op0;
        tree genop2 = currop->op1;
        tree genop3 = currop->op2;
        genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
                                                   stmts);
        if (!genop0)
          return NULL_TREE;
        genop1 = find_or_generate_expression (block, genop1, stmts);
        if (!genop1)
          return NULL_TREE;
        if (genop2)
          {
            tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
            /* Drop zero minimum index if redundant.  */
            if (integer_zerop (genop2)
                && (!domain_type
                    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
              genop2 = NULL_TREE;
            else
              {
                genop2 = find_or_generate_expression (block, genop2, stmts);
                if (!genop2)
                  return NULL_TREE;
              }
          }
        if (genop3)
          {
            tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
            /* We can't always put a size in units of the element alignment
               here as the element alignment may be not visible.  See
               PR43783.  Simply drop the element size for constant
               sizes.  */
            if (TREE_CODE (genop3) == INTEGER_CST
                && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
                && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
                             (wi::to_offset (genop3)
                              * vn_ref_op_align_unit (currop))))
              genop3 = NULL_TREE;
            else
              {
                genop3 = find_or_generate_expression (block, genop3, stmts);
                if (!genop3)
                  return NULL_TREE;
              }
          }
        return build4 (currop->opcode, currop->type, genop0, genop1,
                       genop2, genop3);
      }
    case COMPONENT_REF:
      {
        tree op0;
        tree op1;
        tree genop2 = currop->op1;
        op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
        if (!op0)
          return NULL_TREE;
        /* op1 should be a FIELD_DECL, which are represented by themselves.  */
        op1 = currop->op0;
        if (genop2)
          {
            genop2 = find_or_generate_expression (block, genop2, stmts);
            if (!genop2)
              return NULL_TREE;
          }
        return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
        genop = find_or_generate_expression (block, currop->op0, stmts);
        return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case POLY_INT_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
    case FUNCTION_DECL:
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
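
/* For illustration only (a hypothetical sketch; the exact operand
   encoding is defined by tree-ssa-sccvn.c): a load like p_1->f[i_2]
   is represented as a chain of vn_reference_op_s pieces, outermost
   first, roughly

     { ARRAY_REF <op0 = i_2>, COMPONENT_REF <op0 = f>,
       MEM_REF <op0 = 0>, SSA_NAME <op0 = p_1> }

   and create_component_ref_by_pieces_1 walks that chain recursively,
   rebuilding the inner reference first and invoking
   find_or_generate_expression only for the leaf SSA pieces
   (p_1 and i_2 here).  */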
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
        return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
         places.  We can insert NARYs, which eventually re-materialize
         their operand values.  */
      if (temp->kind == NARY)
        return create_expression_by_pieces (block, temp, stmts,
                                            get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form),
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
                             gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gassign *newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
        return NULL_TREE;
      if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
        return folded;
      break;
    case CONSTANT:
      {
        folded = PRE_EXPR_CONSTANT (expr);
        tree tem = fold_convert (exprtype, folded);
        if (is_gimple_min_invariant (tem))
          return tem;
        break;
      }
    case REFERENCE:
      if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
        {
          vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
          unsigned int operand = 1;
          vn_reference_op_t currop = &ref->operands[0];
          tree sc = NULL_TREE;
          tree fn = find_or_generate_expression (block, currop->op0, stmts);
          if (!fn)
            return NULL_TREE;
          if (currop->op1)
            {
              sc = find_or_generate_expression (block, currop->op1, stmts);
              if (!sc)
                return NULL_TREE;
            }
          auto_vec<tree> args (ref->operands.length () - 1);
          while (operand < ref->operands.length ())
            {
              tree arg = create_component_ref_by_pieces_1 (block, ref,
                                                           &operand, stmts);
              if (!arg)
                return NULL_TREE;
              args.quick_push (arg);
            }
          gcall *call = gimple_build_call_vec (fn, args);
          gimple_set_location (call, expr->loc);
          gimple_call_set_fntype (call, currop->type);
          if (sc)
            gimple_call_set_chain (call, sc);
          tree forcedname = make_ssa_name (TREE_TYPE (currop->type));
          gimple_call_set_lhs (call, forcedname);
          /* There's no CCP pass after PRE which would re-compute alignment
             information so make sure we re-materialize this here.  */
          if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
              && args.length () - 2 <= 1
              && tree_fits_uhwi_p (args[1])
              && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
            {
              unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
              unsigned HOST_WIDE_INT hmisalign
                = args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
              if ((halign & (halign - 1)) == 0
                  && (hmisalign & ~(halign - 1)) == 0
                  && (unsigned int) halign != 0)
                set_ptr_info_alignment (get_ptr_info (forcedname),
                                        halign, hmisalign);
            }
          gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
          gimple_seq_add_stmt_without_update (&forced_stmts, call);
          folded = forcedname;
        }
      else
        {
          folded = create_component_ref_by_pieces (block,
                                                   PRE_EXPR_REFERENCE (expr),
                                                   stmts);
          if (!folded)
            return NULL_TREE;
          name = make_temp_ssa_name (exprtype, NULL, "pretmp");
          newstmt = gimple_build_assign (name, folded);
          gimple_set_location (newstmt, expr->loc);
          gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
          gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
          folded = name;
        }
      break;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        tree *genop = XALLOCAVEC (tree, nary->length);
        unsigned i;
        for (i = 0; i < nary->length; ++i)
          {
            genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
            if (!genop[i])
              return NULL_TREE;
            /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
               may have conversions stripped.  */
            if (nary->opcode == POINTER_PLUS_EXPR)
              {
                if (i == 0)
                  genop[i] = gimple_convert (&forced_stmts,
                                             nary->type, genop[i]);
                else if (i == 1)
                  genop[i] = gimple_convert (&forced_stmts,
                                             sizetype, genop[i]);
              }
            else
              genop[i] = gimple_convert (&forced_stmts,
                                         TREE_TYPE (nary->op[i]), genop[i]);
          }
        if (nary->opcode == CONSTRUCTOR)
          {
            vec<constructor_elt, va_gc> *elts = NULL;
            for (i = 0; i < nary->length; ++i)
              CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
            folded = build_constructor (nary->type, elts);
            name = make_temp_ssa_name (exprtype, NULL, "pretmp");
            newstmt = gimple_build_assign (name, folded);
            gimple_set_location (newstmt, expr->loc);
            gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
            folded = name;
          }
        else
          {
            switch (nary->length)
              {
              case 1:
                folded = gimple_build (&forced_stmts, expr->loc,
                                       nary->opcode, nary->type, genop[0]);
                break;
              case 2:
                folded = gimple_build (&forced_stmts, expr->loc, nary->opcode,
                                       nary->type, genop[0], genop[1]);
                break;
              case 3:
                folded = gimple_build (&forced_stmts, expr->loc, nary->opcode,
                                       nary->type, genop[0], genop[1],
                                       genop[2]);
                break;
              default:
                gcc_unreachable ();
              }
          }
      }
      break;
    default:
      gcc_unreachable ();
    }

  folded = gimple_convert (&forced_stmts, exprtype, folded);

  /* If there is nothing to insert, return the simplified result.  */
  if (gimple_seq_empty_p (forced_stmts))
    return folded;
  /* If we simplified to a constant return it and discard eventually
     built stmts.  */
  if (is_gimple_min_invariant (folded))
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  /* Likewise if we simplified to sth not queued for insertion.  */
  bool found = false;
  gsi = gsi_last (forced_stmts);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree forcedname = gimple_get_lhs (stmt);
      if (forcedname == folded)
        {
          found = true;
          break;
        }
    }
  if (! found)
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  gcc_assert (TREE_CODE (folded) == SSA_NAME);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree forcedname = gimple_get_lhs (stmt);
          pre_expr nameexpr;

          if (forcedname != folded)
            {
              VN_INFO (forcedname)->valnum = forcedname;
              VN_INFO (forcedname)->value_id = get_next_value_id ();
              nameexpr = get_or_alloc_expr_for_name (forcedname);
              add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
              bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
              bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
            }

          bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
        }
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = folded;

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO (name)->value_id = value_id;
  VN_INFO (name)->valnum = vn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
      fprintf (dump_file, " in predecessor %d (%04d)\n",
               block->index, value_id);
    }

  return name;
}
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
                            vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gphi *phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
                                               EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
                                                EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
          && expr->kind != REFERENCE)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
          nophi = true;
        }
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];
      builtexpr = create_expression_by_pieces (bprime, eprime,
                                               &stmts, type);
      gcc_assert (!(pred->flags & EDGE_ABNORMAL));
      if (!gimple_seq_empty_p (stmts))
        {
          basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
          gcc_assert (! new_bb);
          insertions = true;
        }
      if (!builtexpr)
        {
          /* We cannot insert a PHI node if we failed to insert
             on one edge.  */
          nophi = true;
          continue;
        }
      if (is_gimple_min_invariant (builtexpr))
        avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
      else
        avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  VN_INFO (temp)->value_id = val;
  VN_INFO (temp)->valnum = vn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
                  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
        add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
                     pred, UNKNOWN_LOCATION);
      else
        add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     added to AVAIL_OUT.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
                               newphi);
  bitmap_insert_into_set (NEW_SETS (block),
                          newphi);

  /* If we insert a PHI node for a conversion of another PHI node
     in the same basic-block try to preserve range information.
     This is important so that followup loop passes receive optimal
     number of iteration analysis results.  See PR61743.  */
  if (expr->kind == NARY
      && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
      && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
      && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
      && INTEGRAL_TYPE_P (type)
      && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
      && (TYPE_PRECISION (type)
          >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
      && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
    {
      wide_int min, max;
      if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
          && !wi::neg_p (min, SIGNED)
          && !wi::neg_p (max, SIGNED))
        /* Just handle extension and sign-changes of all-positive ranges.  */
        set_range_info (temp,
                        SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
                        wide_int_storage::from (min, TYPE_PRECISION (type),
                                                TYPE_SIGN (type)),
                        wide_int_storage::from (max, TYPE_PRECISION (type),
                                                TYPE_SIGN (type)));
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0);
      fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
    }
  pre_stats.phis++;
  return true;
}
/* Perform insertion of partially redundant or hoistable values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
           any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
           the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
           NEW_SETS set.
   If the block has multiple successors,
       3a. Iterate over the ANTIC values for the block to see if
           any of them are good candidates for hoisting.
       3b. If so, insert expressions computing the values in BLOCK,
           and add the new expressions into the NEW_SETS set.
   4. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 4 are done by insert_aux.  2b, 2c and 2d are done by
   do_pre_regular_insertion and do_partial_insertion.  3a and 3b are
   done in do_hoist_insertion.  */
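
/* An illustrative (hypothetical) example of steps 2a-2d: given

     if (cond_1)
       x_2 = a_3 + b_4;
     (nothing on the other path)
     y_5 = a_3 + b_4;

   a_3 + b_4 is partially redundant at the join: available from one
   predecessor, missing on the other.  Insertion computes it in the
   predecessor that lacks it and merges the results with a PHI,

     if (cond_1)
       x_2 = a_3 + b_4;
     else
       pretmp_6 = a_3 + b_4;
     prephitmp_7 = PHI <x_2, pretmp_6>;
     y_5 = prephitmp_7;

   which makes the second computation fully redundant so the
   elimination phase of GVN can remove it.  The pretmp/prephitmp names
   correspond to the temporaries created by create_expression_by_pieces
   and insert_into_preds_of_block above.  */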
static bool
do_pre_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
          || expr->kind == REFERENCE)
        {
          unsigned int val;
          bool by_some = false;
          bool cant_insert = false;
          bool all_same = true;
          pre_expr first_s = NULL;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;
          pre_expr edoubleprime = NULL;
          bool do_insertion = false;

          val = get_expr_value_id (expr);
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Found fully redundant value: ");
                  print_pre_expr (dump_file, expr);
                  fprintf (dump_file, "\n");
                }
              continue;
            }

          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              /* We are looking at ANTIC_OUT of bprime.  */
              eprime = phi_translate (NULL, expr, ANTIC_IN (block), NULL, pred);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  avail[pred->dest_idx] = NULL;
                  cant_insert = true;
                  break;
                }

              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
                                                 vprime);
              if (edoubleprime == NULL)
                {
                  avail[pred->dest_idx] = eprime;
                  all_same = false;
                }
              else
                {
                  avail[pred->dest_idx] = edoubleprime;
                  by_some = true;
                  /* We want to perform insertions to remove a redundancy on
                     a path in the CFG we want to optimize for speed.  */
                  if (optimize_edge_for_speed_p (pred))
                    do_insertion = true;
                  if (first_s == NULL)
                    first_s = edoubleprime;
                  else if (!pre_expr_d::equal (first_s, edoubleprime))
                    all_same = false;
                }
            }
          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && !all_same && by_some)
            {
              if (!do_insertion)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Skipping partial redundancy for "
                               "expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d), no redundancy on to be "
                               "optimized for speed edge\n", val);
                    }
                }
              else if (dbg_cnt (treepre_insert))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Found partial redundancy for "
                               "expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d)\n",
                               get_expr_value_id (expr));
                    }
                  if (insert_into_preds_of_block (block,
                                                  get_expression_id (expr),
                                                  avail))
                    new_stuff = true;
                }
            }
          /* If all edges produce the same value and that value is
             an invariant, then the PHI has the same value on all
             edges.  Note this.  */
          else if (!cant_insert && all_same)
            {
              gcc_assert (edoubleprime->kind == CONSTANT
                          || edoubleprime->kind == NAME);

              tree temp = make_temp_ssa_name (get_expr_type (expr),
                                              NULL, "pretmp");
              gassign *assign
                = gimple_build_assign (temp,
                                       edoubleprime->kind == CONSTANT ?
                                       PRE_EXPR_CONSTANT (edoubleprime) :
                                       PRE_EXPR_NAME (edoubleprime));
              gimple_stmt_iterator gsi = gsi_after_labels (block);
              gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

              VN_INFO (temp)->value_id = val;
              VN_INFO (temp)->valnum = vn_valnum_from_value_id (val);
              if (VN_INFO (temp)->valnum == NULL_TREE)
                VN_INFO (temp)->valnum = temp;
              bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
              pre_expr newe = get_or_alloc_expr_for_name (temp);
              add_to_value (val, newe);
              bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
              bitmap_insert_into_set (NEW_SETS (block), newe);
            }
        }
    }

  exprs.release ();
  return new_stuff;
}
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_pre_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
          || expr->kind == REFERENCE)
        {
          unsigned int val;
          bool by_all = true;
          bool cant_insert = false;
          edge pred;
          basic_block bprime;
          pre_expr eprime = NULL;
          edge_iterator ei;

          val = get_expr_value_id (expr);
          if (bitmap_set_contains_value (PHI_GEN (block), val))
            continue;
          if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
            continue;

          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              unsigned int vprime;
              pre_expr edoubleprime;

              /* We should never run insertion for the exit block
                 and so not come across fake pred edges.  */
              gcc_assert (!(pred->flags & EDGE_FAKE));
              bprime = pred->src;
              eprime = phi_translate (NULL, expr, ANTIC_IN (block),
                                      PA_IN (block), pred);

              /* eprime will generally only be NULL if the
                 value of the expression, translated
                 through the PHI for this predecessor, is
                 undefined.  If that is the case, we can't
                 make the expression fully redundant,
                 because its value is undefined along a
                 predecessor path.  We can thus break out
                 early because it doesn't matter what the
                 rest of the results are.  */
              if (eprime == NULL)
                {
                  avail[pred->dest_idx] = NULL;
                  cant_insert = true;
                  break;
                }

              vprime = get_expr_value_id (eprime);
              edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
              avail[pred->dest_idx] = edoubleprime;
              if (edoubleprime == NULL)
                {
                  by_all = false;
                  break;
                }
            }

          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
          if (!cant_insert && by_all)
            {
              edge succ;
              bool do_insertion = false;

              /* Insert only if we can remove a later expression on a path
                 that we want to optimize for speed.
                 The phi node that we will be inserting in BLOCK is not free,
                 and inserting it for the sake of !optimize_for_speed successor
                 may cause regressions on the speed path.  */
              FOR_EACH_EDGE (succ, ei, block->succs)
                {
                  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
                      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
                    {
                      if (optimize_edge_for_speed_p (succ))
                        do_insertion = true;
                    }
                }

              if (!do_insertion)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Skipping partial partial redundancy "
                               "for expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d), not (partially) anticipated "
                               "on any to be optimized for speed edges\n", val);
                    }
                }
              else if (dbg_cnt (treepre_insert))
                {
                  pre_stats.pa_insert++;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Found partial partial redundancy "
                               "for expression ");
                      print_pre_expr (dump_file, expr);
                      fprintf (dump_file, " (%04d)\n",
                               get_expr_value_id (expr));
                    }
                  if (insert_into_preds_of_block (block,
                                                  get_expression_id (expr),
                                                  avail))
                    new_stuff = true;
                }
            }
        }
    }

  exprs.release ();
  return new_stuff;
}
/* Insert expressions in BLOCK to compute hoistable values up.
   Return TRUE if something was inserted, otherwise return FALSE.
   The caller has to make sure that BLOCK has at least two successors.  */

static bool
do_hoist_insertion (basic_block block)
{
  edge e;
  edge_iterator ei;
  bool new_stuff = false;
  unsigned i;
  gimple_stmt_iterator last;

  /* At least two successors, or else...  */
  gcc_assert (EDGE_COUNT (block->succs) >= 2);

  /* Check that all successors of BLOCK are dominated by block.
     We could use dominated_by_p () for this, but actually there is a much
     quicker check: any successor that is dominated by BLOCK can't have
     more than one predecessor edge.  */
  FOR_EACH_EDGE (e, ei, block->succs)
    if (! single_pred_p (e->dest))
      return false;

  /* Determine the insertion point.  If we cannot safely insert before
     the last stmt if we'd have to, bail out.  */
  last = gsi_last_bb (block);
  if (!gsi_end_p (last)
      && !is_ctrl_stmt (gsi_stmt (last))
      && stmt_ends_bb_p (gsi_stmt (last)))
    return false;

  /* Compute the set of hoistable expressions from ANTIC_IN.  First compute
     hoistable values.  */
  bitmap_set hoistable_set;

  /* A hoistable value must be in ANTIC_IN (block)
     but not in AVAIL_OUT (BLOCK).  */
  bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
  bitmap_and_compl (&hoistable_set.values,
                    &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);

  /* Short-cut for a common case: hoistable_set is empty.  */
  if (bitmap_empty_p (&hoistable_set.values))
    return false;

  /* Compute which of the hoistable values is in AVAIL_OUT of
     at least one of the successors of BLOCK.  */
  bitmap_head availout_in_some;
  bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
  FOR_EACH_EDGE (e, ei, block->succs)
    /* Do not consider expressions solely because of their availability
       on loop exits.  They'd be ANTIC-IN throughout the whole loop
       and thus effectively hoisted across loops by combination of
       PRE and hoisting.  */
    if (! loop_exit_edge_p (block->loop_father, e))
      bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
                           &AVAIL_OUT (e->dest)->values);
  bitmap_clear (&hoistable_set.values);

  /* Short-cut for a common case: availout_in_some is empty.  */
  if (bitmap_empty_p (&availout_in_some))
    return false;

  /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set.  */
  bitmap_move (&hoistable_set.values, &availout_in_some);
  hoistable_set.expressions = ANTIC_IN (block)->expressions;

  /* Now finally construct the topological-ordered expression set.  */
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);

  bitmap_clear (&hoistable_set.values);

  /* If there are candidate values for hoisting, insert expressions
     strategically to make the hoistable expressions fully redundant.  */
  pre_expr expr;
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      /* While we try to sort expressions topologically above, the
         sorting doesn't work out perfectly.  Catch expressions we
         already inserted.  */
      unsigned int value_id = get_expr_value_id (expr);
      if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file,
                       "Already inserted expression for ");
              print_pre_expr (dump_file, expr);
              fprintf (dump_file, " (%04d)\n", value_id);
            }
          continue;
        }

      /* OK, we should hoist this value.  Perform the transformation.  */
      pre_stats.hoist_insert++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "Inserting expression in block %d for code hoisting: ",
                   block->index);
          print_pre_expr (dump_file, expr);
          fprintf (dump_file, " (%04d)\n", value_id);
        }

      gimple_seq stmts = NULL;
      tree res = create_expression_by_pieces (block, expr, &stmts,
                                              get_expr_type (expr));

      /* Do not return true if expression creation ultimately
         did not insert any statements.  */
      if (gimple_seq_empty_p (stmts))
        res = NULL_TREE;
      else
        {
          if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
            gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
          else
            gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
        }

      /* Make sure to not return true if expression creation ultimately
         failed but also make sure to insert any stmts produced as they
         are tracked in inserted_exprs.  */
      if (! res)
        continue;

      new_stuff = true;
    }

  exprs.release ();

  return new_stuff;
}
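
/* An illustrative (hypothetical) example: if both arms of a branch
   compute the same value,

     if (cond_1)
       x_2 = a_3 + b_4;
     else
       y_5 = a_3 + b_4;

   then a_3 + b_4 is in ANTIC_IN of the branch block and in AVAIL_OUT
   of both successors, so do_hoist_insertion materializes it once
   before the branch,

     pretmp_6 = a_3 + b_4;
     if (cond_1)
       ...

   after which elimination rewrites both arms to reuse pretmp_6.  */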
/* Perform insertion of partially redundant and hoistable values.  */

static void
insert (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    NEW_SETS (bb) = bitmap_set_new ();

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int rpo_num = pre_and_rev_post_order_compute (NULL, rpo, false);

  int num_iterations = 0;
  bool changed;
  do
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
        fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);

      changed = false;
      for (int idx = 0; idx < rpo_num; ++idx)
        {
          basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]);
          basic_block dom = get_immediate_dominator (CDI_DOMINATORS, block);
          if (dom)
            {
              unsigned i;
              bitmap_iterator bi;
              bitmap_set_t newset;

              /* First, update the AVAIL_OUT set with anything we may have
                 inserted higher up in the dominator tree.  */
              newset = NEW_SETS (dom);
              if (newset)
                {
                  /* Note that we need to value_replace both NEW_SETS, and
                     AVAIL_OUT.  For both the case of NEW_SETS, the value may be
                     represented by some non-simple expression here that we want
                     to replace it with.  */
                  FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
                    {
                      pre_expr expr = expression_for_id (i);
                      bitmap_value_replace_in_set (NEW_SETS (block), expr);
                      bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
                    }
                }

              /* Insert expressions for partial redundancies.  */
              if (flag_tree_pre && !single_pred_p (block))
                {
                  changed |= do_pre_regular_insertion (block, dom);
                  if (do_partial_partial)
                    changed |= do_pre_partial_partial_insertion (block, dom);
                }

              /* Insert expressions for hoisting.  */
              if (flag_code_hoisting && EDGE_COUNT (block->succs) >= 2)
                changed |= do_hoist_insertion (block);
            }
        }

      /* Clear the NEW sets before the next iteration.  We have already
         fully propagated its contents.  */
      if (changed)
        FOR_ALL_BB_FN (bb, cfun)
          bitmap_set_free (NEW_SETS (bb));
    }
  while (changed);

  statistics_histogram_event (cfun, "insert iterations", num_iterations);

  free (rpo);
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
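
/* A small (hypothetical) instance of the equations above: if the
   dominator of B leaves AVAIL_OUT[dom(B)] = { a_1, t_2 }, B has a PHI
   p_3 = PHI <...> and defines t_4 = a_1 * t_2, then

     AVAIL_OUT[B] = { a_1, t_2 } U { p_3 } U { t_4 }.  */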
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;
  tree name;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      pre_expr e;
      if (!SSA_NAME_IS_DEFAULT_DEF (name)
          || has_zero_uses (name)
          || virtual_operand_p (name))
        continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                                    e);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                        "tmp_gen", ENTRY_BLOCK);
      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                        "avail_out", ENTRY_BLOCK);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
    = ssa_default_def (cfun, gimple_vop (cfun));

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple *stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];
      vn_context_bb = block;

      /* Initially, the set of available values in BLOCK is that of
         its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
        {
          bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
          BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
        }

      /* Generate values for PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          tree result = gimple_phi_result (gsi.phi ());

          /* We have no need for virtual phis, as they don't represent
             actual computations.  */
          if (virtual_operand_p (result))
            {
              BB_LIVE_VOP_ON_EXIT (block) = result;
              continue;
            }

          pre_expr e = get_or_alloc_expr_for_name (result);
          add_to_value (get_expr_value_id (e), e);
          bitmap_value_insert_into_set (AVAIL_OUT (block), e);
          bitmap_insert_into_set (PHI_GEN (block), e);
        }

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);

          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
          if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return or have side-effects
                 that forbids hoisting possibly trapping expressions
                 before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_vdef (stmt))
            BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (cfun, stmt)
              || is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            {
              if (ssa_undefined_value_p (op))
                continue;
              pre_expr e = get_or_alloc_expr_for_name (op);
              bitmap_value_insert_into_set (EXP_GEN (block), e);
            }

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                vn_reference_s ref1;
                pre_expr result = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                    (gimple_vuse (stmt))) != block)
                  {
                    result = pre_expr_pool.allocate ();
                    result->kind = REFERENCE;
                    result->id = 0;
                    result->loc = gimple_location (stmt);
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary || nary->predicated_values)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return avoid
                         adding the nary to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = pre_expr_pool.allocate ();
                      result->kind = NARY;
                      result->id = 0;
                      result->loc = gimple_location (stmt);
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }

                  case VN_REFERENCE:
                    {
                      tree rhs1 = gimple_assign_rhs1 (stmt);
                      ao_ref rhs1_ref;
                      ao_ref_init (&rhs1_ref, rhs1);
                      alias_set_type set = ao_ref_alias_set (&rhs1_ref);
                      alias_set_type base_set
                        = ao_ref_base_alias_set (&rhs1_ref);
                      vec<vn_reference_op_s> operands
                        = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
                      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                                  base_set, TREE_TYPE (rhs1),
                                                  operands, &ref, VN_WALK);
                      if (!ref)
                        {
                          operands.release ();
                          continue;
                        }

                      /* If the REFERENCE traps and there was a preceding
                         point in the block that might not return avoid
                         adding the reference to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_reference_may_trap (ref))
                        continue;

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple *def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            {
                              operands.release ();
                              continue;
                            }
                        }

                      /* If the load was value-numbered to another
                         load make sure we do not use its expression
                         for insertion if it wouldn't be a valid
                         replacement.  */
                      /* At the moment we have a testcase
                         for hoist insertion of aligned vs. misaligned
                         variants in gcc.dg/torture/pr65270-1.c thus
                         with just alignment to be considered we can
                         simply replace the expression in the hashtable
                         with the most conservative one.  */
                      vn_reference_op_t ref1 = &ref->operands.last ();
                      while (ref1->opcode != TARGET_MEM_REF
                             && ref1->opcode != MEM_REF
                             && ref1 != &ref->operands[0])
                        --ref1;
                      vn_reference_op_t ref2 = &operands.last ();
                      while (ref2->opcode != TARGET_MEM_REF
                             && ref2->opcode != MEM_REF
                             && ref2 != &operands[0])
                        --ref2;
                      if ((ref1->opcode == TARGET_MEM_REF
                           || ref1->opcode == MEM_REF)
                          && (TYPE_ALIGN (ref1->type)
                              > TYPE_ALIGN (ref2->type)))
                        ref1->type
                          = build_aligned_type (ref1->type,
                                                TYPE_ALIGN (ref2->type));
                      /* TBAA behavior is an obvious part so make sure
                         that the hashtable one covers this as well
                         by adjusting the ref alias set and its base.  */
                      if (ref->set == set
                          || alias_set_subset_of (set, ref->set))
                        ;
                      else if (alias_set_subset_of (ref->set, set))
                        {
                          ref->set = set;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (TREE_TYPE (ref2->op0),
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (TREE_TYPE (ref2->op2),
                                                  wi::to_wide (ref1->op2));
                        }
                      else
                        {
                          ref->set = 0;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op2));
                        }
                      operands.release ();

                      result = pre_expr_pool.allocate ();
                      result->kind = REFERENCE;
                      result->id = 0;
                      result->loc = gimple_location (stmt);
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }
            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }
  vn_context_bb = NULL;

  free (worklist);
}
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
/* Valueization hook for RPO VN when we are calling back to it
   at ANTIC compute time.  */

static tree
pre_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      tree tem = VN_INFO (name)->valnum;
      if (tem != VN_TOP && tem != name)
        {
          if (TREE_CODE (tem) != SSA_NAME
              || SSA_NAME_IS_DEFAULT_DEF (tem))
            return tem;
          /* We create temporary SSA names for representatives that
             do not have a definition (yet) but are not default defs either;
             assume they are fine to use.  */
          basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (tem));
          if (! def_bb
              || dominated_by_p (CDI_DOMINATORS, vn_context_bb, def_bb))
            return tem;
          /* ???  Now we could look for a leader.  Ideally we'd somehow
             expose RPO VN leaders and get rid of AVAIL_OUT as well...  */
        }
    }
  return name;
}
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before VN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_edges_for_insertion ();
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  run_rpo_vn (VN_WALK);

  init_pre ();

  vn_valueize = pre_valueize;

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast nor do
     we require AVAIL.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_avail ();
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* The eliminate phase folds statements, which might (though it should
     not) end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  todo |= eliminate_with_rpo_vn (inserted_exprs);

  vn_valueize = NULL;

  /* Because we don't follow exactly the standard PRE algorithm, and decide not
     to insert PHI nodes sometimes, and because value numbering of casts isn't
     perfect, we sometimes end up inserting dead code.  This simple DCE-like
     pass removes any insertions we made that weren't actually used.  */
  simple_dce_from_worklist (inserted_exprs);

  fini_pre ();

  scev_finalize ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     Options:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_rpo_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}