/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dce.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	does expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).  */

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining source of
      quadratic memory use in GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion of the value of that expression in that block, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.  A
   source-level illustration follows this comment.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
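/* A standalone illustration of the partial-redundancy case above (not
   part of the pass; the pre_example_* functions are invented for this
   sketch).  In pre_example_before, A + B is available from the then-arm
   but not from the fall-through predecessor of the return block, while
   it is ANTIC in both predecessors.  PRE inserts the computation on the
   path where it is missing, after which the second computation is fully
   redundant and elimination replaces it, as pre_example_after shows by
   hand.  */

static int
pre_example_before (int a, int b, int flag)
{
  int x = 0;
  if (flag)
    x = a + b;		/* A + B available on this path only.  */
  return x + (a + b);	/* A + B partially redundant here.  */
}

static int
pre_example_after (int a, int b, int flag)
{
  int x = 0, t;
  if (flag)
    x = t = a + b;	/* Original computation.  */
  else
    t = a + b;		/* Inserted by PRE; the use below now reuses T.  */
  return x + t;		/* Fully redundant computation eliminated.  */
}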
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions); a worked diamond example
   follows this comment block.

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */
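/* A hand-written illustration of the five hoisting conditions above
   (again not part of the pass; the hoist_example_* functions are
   invented for this sketch).  Both arms of the diamond compute A * B,
   so the value is in ANTIC_IN of the block ending in the branch (1),
   not in its AVAIL_OUT (2), both successors are dominated by that
   block (3), the value is in the AVAIL_OUT of both successors (4), and
   there are two successors (5).  Hoisting inserts one computation
   before the branch, making both arm computations fully redundant.  */

static int
hoist_example_before (int a, int b, int flag)
{
  if (flag)
    return (a * b) + 1;
  else
    return (a * b) - 1;
}

static int
hoist_example_after (int a, int b, int flag)
{
  int t = a * b;	/* Hoisted computation; the arms now reuse T.  */
  if (flag)
    return t + 1;
  else
    return t - 1;
}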
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.  A toy sketch of
   this scheme follows this comment.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
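/* A minimal, self-contained sketch of the two-bitmap scheme described
   above (simplified to single machine words; the toy_* names are
   invented here, the real representation is struct bitmap_set below).
   Membership of a value is a bit test on the value bitmap; enumeration
   walks the expression bitmap.  */

struct toy_bitmap_set
{
  unsigned long values;		/* Bit N set <=> value-id N in the set.  */
  unsigned long expressions;	/* Bit N set <=> expression-id N in the set.  */
};

static inline void
toy_set_insert (struct toy_bitmap_set *s, unsigned value_id, unsigned expr_id)
{
  s->values |= 1ul << value_id;
  s->expressions |= 1ul << expr_id;
}

static inline int
toy_set_contains_value (const struct toy_bitmap_set *s, unsigned value_id)
{
  return (s->values >> value_id) & 1;
}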
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;

/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      gcc_assert (!PRE_EXPR_NARY (expr)->predicated_values);
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a VN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
vn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
	 for the same value to appear in a set.  This is needed for
	 TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned to_remove = -1U;
  bitmap_and_compl_into (&a->values, &b->values);
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&a->expressions, to_remove);
	  to_remove = -1U;
	}
      pre_expr expr = expression_for_id (i);
      if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
	to_remove = i;
    }
  if (to_remove != -1U)
    bitmap_clear_bit (&a->expressions, to_remove);
}

/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      return;
	    }
	}
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}

void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
		     bitmap_set_t set3 = NULL)
{
  pre_expr result = NULL;

  if (set1)
    result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression that is available in B.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e, basic_block b = NULL)
{
  tree name, valnum = NULL_TREE;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      {
		tree name = PRE_EXPR_NAME (rep);
		valnum = VN_INFO (name)->valnum;
		gimple *def = SSA_NAME_DEF_STMT (name);
		/* We have to return either a new representative or one
		   that can be used for expression simplification and thus
		   is available in B.  */
		if (! b
		    || gimple_nop_p (def)
		    || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
		  return name;
	      }
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO (name)->value_id = value_id;
  VN_INFO (name)->valnum = valnum ? valnum : name;
  /* ??? For now mark this SSA name for release by VN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (bitmap_set_t, pre_expr, bitmap_set_t, bitmap_set_t, edge);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (bitmap_set_t dest,
		 pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  basic_block pred = e->src;
  basic_block phiblock = e->dest;
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (dest, leader, set1, set2, e);
		if (result && result != leader)
		  /* If op has a leader in the sets we translate make
		     sure to use the value of the translated expression.
		     We might need a new representative for that.  */
		  newnary->op[i] = get_representative_for (result, pred);
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       being inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (e->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (constant);
			/* We want a leader in ANTIC_OUT or AVAIL_OUT here.
			   DEST has what we computed into ANTIC_OUT so far,
			   so pick from that - since topological sorting
			   by sorted_array_from_bitmap_set isn't perfect
			   we may lose some cases here.  */
			constant = find_leader_in_sets (value_id, dest,
							AVAIL_OUT (pred));
			if (constant)
			  {
			    if (dump_file && (dump_flags & TDF_DETAILS))
			      {
				fprintf (dump_file, "simplifying ");
				print_pre_expr (dump_file, expr);
				fprintf (dump_file, " translated %d -> %d to ",
					 phiblock->index, pred->index);
				PRE_EXPR_NARY (expr) = newnary;
				print_pre_expr (dump_file, expr);
				PRE_EXPR_NARY (expr) = nary;
				fprintf (dump_file, " to ");
				print_pre_expr (dump_file, constant);
				fprintf (dump_file, "\n");
			      }
			    return constant;
			  }
		      }
		  }
		else
		  return constant;
	      }

	    /* vn_nary_* do not valueize operands.  */
	    for (i = 0; i < newnary->length; ++i)
	      if (TREE_CODE (newnary->op[i]) == SSA_NAME)
		newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary && !nary->predicated_values)
	      {
		PRE_EXPR_NARY (expr) = nary;
		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (dest, leader, set1, set2, e);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  break;
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partially
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      new_val_id = newref->value_id;
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
	      }
	    PRE_EXPR_REFERENCE (expr) = newref;
	    get_or_alloc_expression_id (expr);
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;
    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (bitmap_set_t dest, pre_expr expr,
	       bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, e->src))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  basic_block saved_valueize_bb = vn_context_bb;
  vn_context_bb = e->src;
  phitrans = phi_translate_1 (dest, expr, set1, set2, e);
  vn_context_bb = saved_valueize_bb;

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (e->dest)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (dest, expr, set, NULL, e);
      if (!translated)
	continue;

      bitmap_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}

/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
1916 /* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
1917 This means expressions that are made up of values we have no leaders for
1918 in SET1 or SET2. */
1920 static void
1921 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
1923 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1924 pre_expr expr;
1925 int i;
1927 FOR_EACH_VEC_ELT (exprs, i, expr)
1929 if (!valid_in_sets (set1, set2, expr))
1931 unsigned int val = get_expr_value_id (expr);
1932 bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
1933 /* We are entered with possibly multiple expressions for a value,
1934 so before removing a value from the set, see if there's an
1935 expression left for it. */
1936 if (! bitmap_find_leader (set1, val))
1937 bitmap_clear_bit (&set1->values, val);
1940 exprs.release ();
1943 /* Clean the set of expressions that are no longer valid in SET because
1944 they are clobbered in BLOCK or because they trap and may not be executed. */
1946 static void
1947 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1949 bitmap_iterator bi;
1950 unsigned i;
1951 unsigned to_remove = -1U;
1952 bool any_removed = false;
1954 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1956 /* Remove queued expr. */
1957 if (to_remove != -1U)
1959 bitmap_clear_bit (&set->expressions, to_remove);
1960 any_removed = true;
1961 to_remove = -1U;
1964 pre_expr expr = expression_for_id (i);
1965 if (expr->kind == REFERENCE)
1967 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1968 if (ref->vuse)
1970 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1971 if (!gimple_nop_p (def_stmt)
1972 && ((gimple_bb (def_stmt) != block
1973 && !dominated_by_p (CDI_DOMINATORS,
1974 block, gimple_bb (def_stmt)))
1975 || (gimple_bb (def_stmt) == block
1976 && value_dies_in_block_x (expr, block))))
1977 to_remove = i;
1980 else if (expr->kind == NARY)
1982 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1983 /* If the NARY may trap make sure the block does not contain
1984 a possible exit point.
1985 ??? This is overly conservative if we translate AVAIL_OUT
1986 as the available expression might be after the exit point. */
1987 if (BB_MAY_NOTRETURN (block)
1988 && vn_nary_may_trap (nary))
1989 to_remove = i;
1993 /* Remove queued expr. */
1994 if (to_remove != -1U)
1996 bitmap_clear_bit (&set->expressions, to_remove);
1997 any_removed = true;
2000 /* Above we only removed expressions; now clean the set of values
2001 which no longer have any corresponding expression. We cannot
2002 clear the value at the time we remove an expression since there
2003 may be multiple expressions per value.
2004 If we queued the possibly-to-be-removed values we could use
2005 bitmap_find_leader to see if there's still an expression for each.
2006 For some ratio of to-be-removed values to the number of
2007 values/expressions in the set this might be faster than rebuilding
2008 the value-set. */
2009 if (any_removed)
2011 bitmap_clear (&set->values);
2012 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2014 pre_expr expr = expression_for_id (i);
2015 unsigned int value_id = get_expr_value_id (expr);
2016 bitmap_set_bit (&set->values, value_id);
2021 static sbitmap has_abnormal_preds;
2023 /* Compute the ANTIC set for BLOCK.
2025 If succs(BLOCK) > 1 then
2026 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2027 else if succs(BLOCK) == 1 then
2028 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2030 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2032 Note that clean() is deferred until after the iteration. */
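/* A small worked example (illustrative only): in a diamond

        B1
       /  \
      B2    B3
       \   /
        B4    <- computes a + b (value V)

   V is in ANTIC_IN (B4) via EXP_GEN; B2 and B3 have the single
   successor B4, so V is phi-translated into their ANTIC_OUT and, if
   neither operand is clobbered or locally defined there, into their
   ANTIC_IN; B1 then intersects ANTIC_IN (B2) and ANTIC_IN (B3) and V
   survives into ANTIC_OUT (B1) as well.  */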
2034 static bool
2035 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2037 bitmap_set_t S, old, ANTIC_OUT;
2038 edge e;
2039 edge_iterator ei;
2041 bool was_visited = BB_VISITED (block);
2042 bool changed = ! BB_VISITED (block);
2043 BB_VISITED (block) = 1;
2044 old = ANTIC_OUT = S = NULL;
2046 /* If any edges from predecessors are abnormal, antic_in is empty,
2047 so do nothing. */
2048 if (block_has_abnormal_pred_edge)
2049 goto maybe_dump_sets;
2051 old = ANTIC_IN (block);
2052 ANTIC_OUT = bitmap_set_new ();
2054 /* If the block has no successors, ANTIC_OUT is empty. */
2055 if (EDGE_COUNT (block->succs) == 0)
2057 /* If we have one successor, we could have some phi nodes to
2058 translate through. */
2059 else if (single_succ_p (block))
2061 e = single_succ_edge (block);
2062 gcc_assert (BB_VISITED (e->dest));
2063 phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e);
2065 /* If we have multiple successors, we take the intersection of all of
2066 them. Note that in the case of loop exit phi nodes, we may have
2067 phis to translate through. */
2068 else
2070 size_t i;
2071 edge first = NULL;
2073 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2074 FOR_EACH_EDGE (e, ei, block->succs)
2076 if (!first
2077 && BB_VISITED (e->dest))
2078 first = e;
2079 else if (BB_VISITED (e->dest))
2080 worklist.quick_push (e);
2081 else
2083 /* Unvisited successors get their ANTIC_IN replaced by the
2084 maximal set to arrive at a maximum ANTIC_IN solution.
2085 We can ignore them in the intersection operation and thus
2086 need not explicitly represent that maximum solution. */
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2089 e->src->index, e->dest->index);
2093 /* With multiple successors we must have visited one already,
2094 which is guaranteed by the iteration order. */
2095 gcc_assert (first != NULL);
2097 phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first);
2099 /* If we have multiple successors we need to intersect the ANTIC_OUT
2100 sets. For values that's a simple intersection but for
2101 expressions it is a union. Given we want to have a single
2102 expression per value in our sets we have to canonicalize.
2103 Avoid randomness and running into cycles like for PR82129 and
2104 canonicalize the expression we choose to the one with the
2105 lowest id. This requires we actually compute the union first. */
2106 FOR_EACH_VEC_ELT (worklist, i, e)
2108 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2110 bitmap_set_t tmp = bitmap_set_new ();
2111 phi_translate_set (tmp, ANTIC_IN (e->dest), e);
2112 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2113 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2114 bitmap_set_free (tmp);
2116 else
2118 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
2119 bitmap_ior_into (&ANTIC_OUT->expressions,
2120 &ANTIC_IN (e->dest)->expressions);
2123 if (! worklist.is_empty ())
2125 /* Prune expressions not in the value set. */
2126 bitmap_iterator bi;
2127 unsigned int i;
2128 unsigned int to_clear = -1U;
2129 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2131 if (to_clear != -1U)
2133 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2134 to_clear = -1U;
2136 pre_expr expr = expression_for_id (i);
2137 unsigned int value_id = get_expr_value_id (expr);
2138 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
2139 to_clear = i;
2141 if (to_clear != -1U)
2142 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2146 /* Prune expressions that are clobbered in block and thus become
2147 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2148 prune_clobbered_mems (ANTIC_OUT, block);
2150 /* Generate ANTIC_OUT - TMP_GEN. */
2151 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2153 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2154 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2155 TMP_GEN (block));
2157 /* Then union in the ANTIC_OUT - TMP_GEN values,
2158 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2159 bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
2160 bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);
2162 /* clean (ANTIC_IN (block)) is deferred until after the iteration converged
2163 because it can cause non-convergence, see for example PR81181. */
2165 /* Intersect ANTIC_IN with the old ANTIC_IN. This is required until
2166 we properly represent the maximum expression set and thus do not
2167 prune values without expressions during the iteration. */
2168 if (was_visited
2169 && bitmap_and_into (&ANTIC_IN (block)->values, &old->values))
2171 if (dump_file && (dump_flags & TDF_DETAILS))
2172 fprintf (dump_file, "warning: intersecting with old ANTIC_IN "
2173 "shrinks the set\n");
2174 /* Prune expressions not in the value set. */
2175 bitmap_iterator bi;
2176 unsigned int i;
2177 unsigned int to_clear = -1U;
2178 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (block), i, bi)
2180 if (to_clear != -1U)
2182 bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
2183 to_clear = -1U;
2185 pre_expr expr = expression_for_id (i);
2186 unsigned int value_id = get_expr_value_id (expr);
2187 if (!bitmap_bit_p (&ANTIC_IN (block)->values, value_id))
2188 to_clear = i;
2190 if (to_clear != -1U)
2191 bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
2194 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2195 changed = true;
2197 maybe_dump_sets:
2198 if (dump_file && (dump_flags & TDF_DETAILS))
2200 if (ANTIC_OUT)
2201 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2203 if (changed)
2204 fprintf (dump_file, "[changed] ");
2205 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2206 block->index);
2208 if (S)
2209 print_bitmap_set (dump_file, S, "S", block->index);
2211 if (old)
2212 bitmap_set_free (old);
2213 if (S)
2214 bitmap_set_free (S);
2215 if (ANTIC_OUT)
2216 bitmap_set_free (ANTIC_OUT);
2217 return changed;
2220 /* Compute PARTIAL_ANTIC for BLOCK.
2222 If succs(BLOCK) > 1 then
2223 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2224 in ANTIC_OUT for all succ(BLOCK)
2225 else if succs(BLOCK) == 1 then
2226 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2228 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
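/* For instance (illustrative): if only one of two successors
   anticipates a + b, its value drops out of the intersection used for
   ANTIC_OUT but stays in the value-wise union used for PA_OUT, which
   makes it a candidate for the speculative insertion done by
   do_pre_partial_partial_insertion.  */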
2231 static void
2232 compute_partial_antic_aux (basic_block block,
2233 bool block_has_abnormal_pred_edge)
2235 bitmap_set_t old_PA_IN;
2236 bitmap_set_t PA_OUT;
2237 edge e;
2238 edge_iterator ei;
2239 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2241 old_PA_IN = PA_OUT = NULL;
2243 /* If any edges from predecessors are abnormal, antic_in is empty,
2244 so do nothing. */
2245 if (block_has_abnormal_pred_edge)
2246 goto maybe_dump_sets;
2248 /* If there are too many partially anticipatable values in the
2249 block, phi_translate_set can take exponential time: stop
2250 before the translation starts. */
2251 if (max_pa
2252 && single_succ_p (block)
2253 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2254 goto maybe_dump_sets;
2256 old_PA_IN = PA_IN (block);
2257 PA_OUT = bitmap_set_new ();
2259 /* If the block has no successors, PA_OUT is empty. */
2260 if (EDGE_COUNT (block->succs) == 0)
2262 /* If we have one successor, we could have some phi nodes to
2263 translate through. Note that we can't phi translate across DFS
2264 back edges in partial antic, because it uses a union operation on
2265 the successors. For recurrences like IV's, we will end up
2266 generating a new value in the set on each go-around (i + 3 (VH.1),
2267 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2268 else if (single_succ_p (block))
2270 e = single_succ_edge (block);
2271 if (!(e->flags & EDGE_DFS_BACK))
2272 phi_translate_set (PA_OUT, PA_IN (e->dest), e);
2274 /* If we have multiple successors, we take the union of all of
2275 them. */
2276 else
2278 size_t i;
2280 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2281 FOR_EACH_EDGE (e, ei, block->succs)
2283 if (e->flags & EDGE_DFS_BACK)
2284 continue;
2285 worklist.quick_push (e);
2287 if (worklist.length () > 0)
2289 FOR_EACH_VEC_ELT (worklist, i, e)
2291 unsigned int i;
2292 bitmap_iterator bi;
2294 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
2295 bitmap_value_insert_into_set (PA_OUT,
2296 expression_for_id (i));
2297 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2299 bitmap_set_t pa_in = bitmap_set_new ();
2300 phi_translate_set (pa_in, PA_IN (e->dest), e);
2301 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2302 bitmap_value_insert_into_set (PA_OUT,
2303 expression_for_id (i));
2304 bitmap_set_free (pa_in);
2306 else
2307 FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
2308 bitmap_value_insert_into_set (PA_OUT,
2309 expression_for_id (i));
2314 /* Prune expressions that are clobbered in block and thus become
2315 invalid if translated from PA_OUT to PA_IN. */
2316 prune_clobbered_mems (PA_OUT, block);
2318 /* PA_IN starts with PA_OUT - TMP_GEN.
2319 Then we subtract things from ANTIC_IN. */
2320 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2322 /* For partial antic, we want to put back in the phi results, since
2323 we will properly avoid making them partially antic over backedges. */
2324 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2325 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2327 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2328 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2330 clean (PA_IN (block), ANTIC_IN (block));
2332 maybe_dump_sets:
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2335 if (PA_OUT)
2336 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2338 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2340 if (old_PA_IN)
2341 bitmap_set_free (old_PA_IN);
2342 if (PA_OUT)
2343 bitmap_set_free (PA_OUT);
2346 /* Compute ANTIC and partial ANTIC sets. */
2348 static void
2349 compute_antic (void)
2351 bool changed = true;
2352 int num_iterations = 0;
2353 basic_block block;
2354 int i;
2355 edge_iterator ei;
2356 edge e;
2358 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2359 We pre-build the map of blocks with incoming abnormal edges here. */
2360 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2361 bitmap_clear (has_abnormal_preds);
2363 FOR_ALL_BB_FN (block, cfun)
2365 BB_VISITED (block) = 0;
2367 FOR_EACH_EDGE (e, ei, block->preds)
2368 if (e->flags & EDGE_ABNORMAL)
2370 bitmap_set_bit (has_abnormal_preds, block->index);
2371 break;
2374 /* While we are here, give empty ANTIC_IN sets to each block. */
2375 ANTIC_IN (block) = bitmap_set_new ();
2376 if (do_partial_partial)
2377 PA_IN (block) = bitmap_set_new ();
2380 /* At the exit block we anticipate nothing. */
2381 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2383 /* For ANTIC computation we need a postorder that also guarantees that
2384 a block with a single successor is visited after its successor.
2385 RPO on the inverted CFG has this property. */
2386 auto_vec<int, 20> postorder;
2387 inverted_post_order_compute (&postorder);
2389 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2390 bitmap_clear (worklist);
2391 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2392 bitmap_set_bit (worklist, e->src->index);
2393 while (changed)
2395 if (dump_file && (dump_flags & TDF_DETAILS))
2396 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2397 /* ??? We need to clear our PHI translation cache here as the
2398 ANTIC sets shrink and we restrict valid translations to
2399 those having operands with leaders in ANTIC. Same below
2400 for PA ANTIC computation. */
2401 num_iterations++;
2402 changed = false;
2403 for (i = postorder.length () - 1; i >= 0; i--)
2405 if (bitmap_bit_p (worklist, postorder[i]))
2407 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2408 bitmap_clear_bit (worklist, block->index);
2409 if (compute_antic_aux (block,
2410 bitmap_bit_p (has_abnormal_preds,
2411 block->index)))
2413 FOR_EACH_EDGE (e, ei, block->preds)
2414 bitmap_set_bit (worklist, e->src->index);
2415 changed = true;
2419 /* Theoretically possible, but *highly* unlikely. */
2420 gcc_checking_assert (num_iterations < 500);
2423 /* We have to clean after the dataflow problem has converged as cleaning
2424 can cause non-convergence because it is based on expressions
2425 rather than values. */
2426 FOR_EACH_BB_FN (block, cfun)
2427 clean (ANTIC_IN (block));
2429 statistics_histogram_event (cfun, "compute_antic iterations",
2430 num_iterations);
2432 if (do_partial_partial)
2434 /* For partial antic we ignore backedges and thus we do not need
2435 to perform any iteration when we process blocks in postorder. */
2436 for (i = postorder.length () - 1; i >= 0; i--)
2438 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2439 compute_partial_antic_aux (block,
2440 bitmap_bit_p (has_abnormal_preds,
2441 block->index));
2445 sbitmap_free (has_abnormal_preds);
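/* Note (illustrative): on an acyclic CFG the inverted-CFG RPO visits
   every block after all of its successors, so the ANTIC sets reach
   their fixpoint in a single sweep; only loops make blocks re-queue
   their predecessors for further iterations.  */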
2449 /* Inserted expressions are placed onto this worklist, which is used
2450 for performing quick dead code elimination of insertions we made
2451 that didn't turn out to be necessary. */
2452 static bitmap inserted_exprs;
2454 /* The actual worker for create_component_ref_by_pieces. */
2456 static tree
2457 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2458 unsigned int *operand, gimple_seq *stmts)
2460 vn_reference_op_t currop = &ref->operands[*operand];
2461 tree genop;
2462 ++*operand;
2463 switch (currop->opcode)
2465 case CALL_EXPR:
2466 gcc_unreachable ();
2468 case MEM_REF:
2470 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2471 stmts);
2472 if (!baseop)
2473 return NULL_TREE;
2474 tree offset = currop->op0;
2475 if (TREE_CODE (baseop) == ADDR_EXPR
2476 && handled_component_p (TREE_OPERAND (baseop, 0)))
2478 poly_int64 off;
2479 tree base;
2480 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2481 &off);
2482 gcc_assert (base);
2483 offset = int_const_binop (PLUS_EXPR, offset,
2484 build_int_cst (TREE_TYPE (offset),
2485 off));
2486 baseop = build_fold_addr_expr (base);
2488 genop = build2 (MEM_REF, currop->type, baseop, offset);
2489 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2490 MR_DEPENDENCE_BASE (genop) = currop->base;
2491 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2492 return genop;
2495 case TARGET_MEM_REF:
2497 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2498 vn_reference_op_t nextop = &ref->operands[++*operand];
2499 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2500 stmts);
2501 if (!baseop)
2502 return NULL_TREE;
2503 if (currop->op0)
2505 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2506 if (!genop0)
2507 return NULL_TREE;
2509 if (nextop->op0)
2511 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2512 if (!genop1)
2513 return NULL_TREE;
2515 genop = build5 (TARGET_MEM_REF, currop->type,
2516 baseop, currop->op2, genop0, currop->op1, genop1);
2518 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2519 MR_DEPENDENCE_BASE (genop) = currop->base;
2520 return genop;
2523 case ADDR_EXPR:
2524 if (currop->op0)
2526 gcc_assert (is_gimple_min_invariant (currop->op0));
2527 return currop->op0;
2529 /* Fallthrough. */
2530 case REALPART_EXPR:
2531 case IMAGPART_EXPR:
2532 case VIEW_CONVERT_EXPR:
2534 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2535 stmts);
2536 if (!genop0)
2537 return NULL_TREE;
2538 return fold_build1 (currop->opcode, currop->type, genop0);
2541 case WITH_SIZE_EXPR:
2543 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2544 stmts);
2545 if (!genop0)
2546 return NULL_TREE;
2547 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2548 if (!genop1)
2549 return NULL_TREE;
2550 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2553 case BIT_FIELD_REF:
2555 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2556 stmts);
2557 if (!genop0)
2558 return NULL_TREE;
2559 tree op1 = currop->op0;
2560 tree op2 = currop->op1;
2561 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2562 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2563 return fold (t);
2566 /* For array ref vn_reference_op's, operand 1 of the array ref
2567 is op0 of the reference op and operand 3 of the array ref is
2568 op1. */
2569 case ARRAY_RANGE_REF:
2570 case ARRAY_REF:
2572 tree genop0;
2573 tree genop1 = currop->op0;
2574 tree genop2 = currop->op1;
2575 tree genop3 = currop->op2;
2576 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2577 stmts);
2578 if (!genop0)
2579 return NULL_TREE;
2580 genop1 = find_or_generate_expression (block, genop1, stmts);
2581 if (!genop1)
2582 return NULL_TREE;
2583 if (genop2)
2585 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2586 /* Drop zero minimum index if redundant. */
2587 if (integer_zerop (genop2)
2588 && (!domain_type
2589 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2590 genop2 = NULL_TREE;
2591 else
2593 genop2 = find_or_generate_expression (block, genop2, stmts);
2594 if (!genop2)
2595 return NULL_TREE;
2598 if (genop3)
2600 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2601 /* We can't always put a size in units of the element alignment
2602 here as the element alignment may not be visible. See
2603 PR43783. Simply drop the element size for constant
2604 sizes. */
2605 if (TREE_CODE (genop3) == INTEGER_CST
2606 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2607 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2608 (wi::to_offset (genop3)
2609 * vn_ref_op_align_unit (currop))))
2610 genop3 = NULL_TREE;
2611 else
2613 genop3 = find_or_generate_expression (block, genop3, stmts);
2614 if (!genop3)
2615 return NULL_TREE;
2618 return build4 (currop->opcode, currop->type, genop0, genop1,
2619 genop2, genop3);
2621 case COMPONENT_REF:
2623 tree op0;
2624 tree op1;
2625 tree genop2 = currop->op1;
2626 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2627 if (!op0)
2628 return NULL_TREE;
2629 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */
2630 op1 = currop->op0;
2631 if (genop2)
2633 genop2 = find_or_generate_expression (block, genop2, stmts);
2634 if (!genop2)
2635 return NULL_TREE;
2637 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2640 case SSA_NAME:
2642 genop = find_or_generate_expression (block, currop->op0, stmts);
2643 return genop;
2645 case STRING_CST:
2646 case INTEGER_CST:
2647 case COMPLEX_CST:
2648 case VECTOR_CST:
2649 case REAL_CST:
2650 case CONSTRUCTOR:
2651 case VAR_DECL:
2652 case PARM_DECL:
2653 case CONST_DECL:
2654 case RESULT_DECL:
2655 case FUNCTION_DECL:
2656 return currop->op0;
2658 default:
2659 gcc_unreachable ();
2663 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2664 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2665 trying to rename aggregates into SSA form directly, which is a no-no.
2667 Thus, this routine doesn't create temporaries, it just builds a
2668 single access expression for the array, calling
2669 find_or_generate_expression to build the innermost pieces.
2671 This function is a subroutine of create_expression_by_pieces, and
2672 should not be called on its own unless you really know what you
2673 are doing. */
2675 static tree
2676 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2677 gimple_seq *stmts)
2679 unsigned int op = 0;
2680 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
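/* For instance (illustrative, hypothetical operands): for a reference
   describing x.f[i_1], the operand walk recurses from the outermost
   COMPONENT_REF and ARRAY_REF pieces down to the base,
   find_or_generate_expression supplies SSA leaders such as i_1, and a
   single GENERIC access tree is returned; no temporary ever holds a
   partial aggregate.  */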
2683 /* Find a simple leader for an expression, or generate one using
2684 create_expression_by_pieces from a NARY expression for the value.
2685 BLOCK is the basic_block we are looking for leaders in.
2686 OP is the tree expression to find a leader for or generate.
2687 Returns the leader or NULL_TREE on failure. */
2689 static tree
2690 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2692 pre_expr expr = get_or_alloc_expr_for (op);
2693 unsigned int lookfor = get_expr_value_id (expr);
2694 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2695 if (leader)
2697 if (leader->kind == NAME)
2698 return PRE_EXPR_NAME (leader);
2699 else if (leader->kind == CONSTANT)
2700 return PRE_EXPR_CONSTANT (leader);
2702 /* Defer. */
2703 return NULL_TREE;
2706 /* It must be a complex expression, so generate it recursively. Note
2707 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2708 where the insert algorithm fails to insert a required expression. */
2709 bitmap exprset = value_expressions[lookfor];
2710 bitmap_iterator bi;
2711 unsigned int i;
2712 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2714 pre_expr temp = expression_for_id (i);
2715 /* We cannot insert random REFERENCE expressions at arbitrary
2716 places. We can insert NARYs, which eventually re-materialize
2717 their operand values. */
2718 if (temp->kind == NARY)
2719 return create_expression_by_pieces (block, temp, stmts,
2720 get_expr_type (expr));
2723 /* Defer. */
2724 return NULL_TREE;
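/* Example (illustrative): asked for OP = a_1 whose value is V, we
   first look for a leader of V in AVAIL_OUT (BLOCK) and return its
   NAME or CONSTANT form; failing that, if some NARY like b_2 + 1 also
   has value V we re-generate it via create_expression_by_pieces, and
   otherwise we return NULL_TREE so the caller defers.  */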
2727 /* Create an expression in pieces, so that we can handle very complex
2728 expressions that may be ANTIC, but not necessarily GIMPLE.
2729 BLOCK is the basic block the expression will be inserted into,
2730 EXPR is the expression to insert (in value form)
2731 STMTS is a statement list to append the necessary insertions into.
2733 This function will die if we hit some value that shouldn't be
2734 ANTIC but is (i.e. there is no leader for it, or for its components).
2735 The function returns NULL_TREE in case a different antic expression
2736 has to be inserted first.
2737 This function may also generate expressions that are themselves
2738 partially or fully redundant. Those that are will be either made
2739 fully redundant during the next iteration of insert (for partially
2740 redundant ones), or eliminated by eliminate (for fully redundant
2741 ones). */
2743 static tree
2744 create_expression_by_pieces (basic_block block, pre_expr expr,
2745 gimple_seq *stmts, tree type)
2747 tree name;
2748 tree folded;
2749 gimple_seq forced_stmts = NULL;
2750 unsigned int value_id;
2751 gimple_stmt_iterator gsi;
2752 tree exprtype = type ? type : get_expr_type (expr);
2753 pre_expr nameexpr;
2754 gassign *newstmt;
2756 switch (expr->kind)
2758 /* We may hit the NAME/CONSTANT case if we have to convert types
2759 that value numbering saw through. */
2760 case NAME:
2761 folded = PRE_EXPR_NAME (expr);
2762 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
2763 return NULL_TREE;
2764 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2765 return folded;
2766 break;
2767 case CONSTANT:
2769 folded = PRE_EXPR_CONSTANT (expr);
2770 tree tem = fold_convert (exprtype, folded);
2771 if (is_gimple_min_invariant (tem))
2772 return tem;
2773 break;
2775 case REFERENCE:
2776 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2778 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2779 unsigned int operand = 1;
2780 vn_reference_op_t currop = &ref->operands[0];
2781 tree sc = NULL_TREE;
2782 tree fn = find_or_generate_expression (block, currop->op0, stmts);
2783 if (!fn)
2784 return NULL_TREE;
2785 if (currop->op1)
2787 sc = find_or_generate_expression (block, currop->op1, stmts);
2788 if (!sc)
2789 return NULL_TREE;
2791 auto_vec<tree> args (ref->operands.length () - 1);
2792 while (operand < ref->operands.length ())
2794 tree arg = create_component_ref_by_pieces_1 (block, ref,
2795 &operand, stmts);
2796 if (!arg)
2797 return NULL_TREE;
2798 args.quick_push (arg);
2800 gcall *call = gimple_build_call_vec (fn, args);
2801 if (sc)
2802 gimple_call_set_chain (call, sc);
2803 tree forcedname = make_ssa_name (currop->type);
2804 gimple_call_set_lhs (call, forcedname);
2805 /* There's no CCP pass after PRE which would re-compute alignment
2806 information, so make sure we re-materialize it here. */
2807 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
2808 && args.length () - 2 <= 1
2809 && tree_fits_uhwi_p (args[1])
2810 && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
2812 unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
2813 unsigned HOST_WIDE_INT hmisalign
2814 = args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
2815 if ((halign & (halign - 1)) == 0
2816 && (hmisalign & ~(halign - 1)) == 0)
2817 set_ptr_info_alignment (get_ptr_info (forcedname),
2818 halign, hmisalign);
2820 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2821 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2822 folded = forcedname;
2824 else
2826 folded = create_component_ref_by_pieces (block,
2827 PRE_EXPR_REFERENCE (expr),
2828 stmts);
2829 if (!folded)
2830 return NULL_TREE;
2831 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2832 newstmt = gimple_build_assign (name, folded);
2833 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2834 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2835 folded = name;
2837 break;
2838 case NARY:
2840 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2841 tree *genop = XALLOCAVEC (tree, nary->length);
2842 unsigned i;
2843 for (i = 0; i < nary->length; ++i)
2845 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2846 if (!genop[i])
2847 return NULL_TREE;
2848 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2849 may have conversions stripped. */
2850 if (nary->opcode == POINTER_PLUS_EXPR)
2852 if (i == 0)
2853 genop[i] = gimple_convert (&forced_stmts,
2854 nary->type, genop[i]);
2855 else if (i == 1)
2856 genop[i] = gimple_convert (&forced_stmts,
2857 sizetype, genop[i]);
2859 else
2860 genop[i] = gimple_convert (&forced_stmts,
2861 TREE_TYPE (nary->op[i]), genop[i]);
2863 if (nary->opcode == CONSTRUCTOR)
2865 vec<constructor_elt, va_gc> *elts = NULL;
2866 for (i = 0; i < nary->length; ++i)
2867 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2868 folded = build_constructor (nary->type, elts);
2869 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2870 newstmt = gimple_build_assign (name, folded);
2871 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2872 folded = name;
2874 else
2876 switch (nary->length)
2878 case 1:
2879 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2880 genop[0]);
2881 break;
2882 case 2:
2883 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2884 genop[0], genop[1]);
2885 break;
2886 case 3:
2887 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2888 genop[0], genop[1], genop[2]);
2889 break;
2890 default:
2891 gcc_unreachable ();
2895 break;
2896 default:
2897 gcc_unreachable ();
2900 folded = gimple_convert (&forced_stmts, exprtype, folded);
2902 /* If there is nothing to insert, return the simplified result. */
2903 if (gimple_seq_empty_p (forced_stmts))
2904 return folded;
2905 /* If we simplified to a constant, return it and discard any
2906 stmts built along the way. */
2907 if (is_gimple_min_invariant (folded))
2909 gimple_seq_discard (forced_stmts);
2910 return folded;
2912 /* Likewise if we simplified to something not queued for insertion. */
2913 bool found = false;
2914 gsi = gsi_last (forced_stmts);
2915 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2917 gimple *stmt = gsi_stmt (gsi);
2918 tree forcedname = gimple_get_lhs (stmt);
2919 if (forcedname == folded)
2921 found = true;
2922 break;
2925 if (! found)
2927 gimple_seq_discard (forced_stmts);
2928 return folded;
2930 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2932 /* If we have any intermediate expressions, add them to the value
2933 sets and chain them into the instruction stream. */
2934 if (forced_stmts)
2936 gsi = gsi_start (forced_stmts);
2937 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2939 gimple *stmt = gsi_stmt (gsi);
2940 tree forcedname = gimple_get_lhs (stmt);
2941 pre_expr nameexpr;
2943 if (forcedname != folded)
2945 VN_INFO (forcedname)->valnum = forcedname;
2946 VN_INFO (forcedname)->value_id = get_next_value_id ();
2947 nameexpr = get_or_alloc_expr_for_name (forcedname);
2948 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2949 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2950 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2953 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2955 gimple_seq_add_seq (stmts, forced_stmts);
2958 name = folded;
2960 /* Fold the last statement. */
2961 gsi = gsi_last (*stmts);
2962 if (fold_stmt_inplace (&gsi))
2963 update_stmt (gsi_stmt (gsi));
2965 /* Add a value number to the temporary.
2966 The value may already exist in either NEW_SETS or AVAIL_OUT, because
2967 we are creating the expression by pieces, and this particular piece of
2968 the expression may have been represented. There is no harm in replacing
2969 here. */
2970 value_id = get_expr_value_id (expr);
2971 VN_INFO (name)->value_id = value_id;
2972 VN_INFO (name)->valnum = vn_valnum_from_value_id (value_id);
2973 if (VN_INFO (name)->valnum == NULL_TREE)
2974 VN_INFO (name)->valnum = name;
2975 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2976 nameexpr = get_or_alloc_expr_for_name (name);
2977 add_to_value (value_id, nameexpr);
2978 if (NEW_SETS (block))
2979 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2980 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2982 pre_stats.insertions++;
2983 if (dump_file && (dump_flags & TDF_DETAILS))
2985 fprintf (dump_file, "Inserted ");
2986 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2987 fprintf (dump_file, " in predecessor %d (%04d)\n",
2988 block->index, value_id);
2991 return name;
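/* Example (illustrative, hypothetical SSA names): generating the NARY
   a + b yields roughly

     pretmp_5 = a_1 + b_2;

   appended to STMTS, with pretmp_5 given the expression's value-id and
   registered as that value's leader in NEW_SETS and AVAIL_OUT of
   BLOCK.  */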
2995 /* Insert the to-be-made-available values of expression EXPRNUM for each
2996 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2997 merge the result with a phi node carrying the same value
2998 number. Return true if we have inserted new stuff. */
3000 static bool
3001 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3002 vec<pre_expr> avail)
3004 pre_expr expr = expression_for_id (exprnum);
3005 pre_expr newphi;
3006 unsigned int val = get_expr_value_id (expr);
3007 edge pred;
3008 bool insertions = false;
3009 bool nophi = false;
3010 basic_block bprime;
3011 pre_expr eprime;
3012 edge_iterator ei;
3013 tree type = get_expr_type (expr);
3014 tree temp;
3015 gphi *phi;
3017 /* Make sure we aren't creating an induction variable. */
3018 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3020 bool firstinsideloop = false;
3021 bool secondinsideloop = false;
3022 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3023 EDGE_PRED (block, 0)->src);
3024 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3025 EDGE_PRED (block, 1)->src);
3026 /* Induction variables only have one edge inside the loop. */
3027 if ((firstinsideloop ^ secondinsideloop)
3028 && expr->kind != REFERENCE)
3030 if (dump_file && (dump_flags & TDF_DETAILS))
3031 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3032 nophi = true;
3036 /* Make the necessary insertions. */
3037 FOR_EACH_EDGE (pred, ei, block->preds)
3039 gimple_seq stmts = NULL;
3040 tree builtexpr;
3041 bprime = pred->src;
3042 eprime = avail[pred->dest_idx];
3043 builtexpr = create_expression_by_pieces (bprime, eprime,
3044 &stmts, type);
3045 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3046 if (!gimple_seq_empty_p (stmts))
3048 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
3049 gcc_assert (! new_bb);
3050 insertions = true;
3052 if (!builtexpr)
3054 /* We cannot insert a PHI node if we failed to insert
3055 on one edge. */
3056 nophi = true;
3057 continue;
3059 if (is_gimple_min_invariant (builtexpr))
3060 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3061 else
3062 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3064 /* If we didn't want a phi node, and we made insertions, we still have
3065 inserted new stuff, and thus return true. If we didn't want a phi node,
3066 and didn't make insertions, we haven't added anything new, so return
3067 false. */
3068 if (nophi && insertions)
3069 return true;
3070 else if (nophi && !insertions)
3071 return false;
3073 /* Now build a phi for the new variable. */
3074 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3075 phi = create_phi_node (temp, block);
3077 VN_INFO (temp)->value_id = val;
3078 VN_INFO (temp)->valnum = vn_valnum_from_value_id (val);
3079 if (VN_INFO (temp)->valnum == NULL_TREE)
3080 VN_INFO (temp)->valnum = temp;
3081 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3082 FOR_EACH_EDGE (pred, ei, block->preds)
3084 pre_expr ae = avail[pred->dest_idx];
3085 gcc_assert (get_expr_type (ae) == type
3086 || useless_type_conversion_p (type, get_expr_type (ae)));
3087 if (ae->kind == CONSTANT)
3088 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3089 pred, UNKNOWN_LOCATION);
3090 else
3091 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3094 newphi = get_or_alloc_expr_for_name (temp);
3095 add_to_value (val, newphi);
3097 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3098 this insertion, since we test for the existence of this value in PHI_GEN
3099 before proceeding with the partial redundancy checks in insert_aux.
3101 The value may exist in AVAIL_OUT, in particular, it could be represented
3102 by the expression we are trying to eliminate, in which case we want the
3103 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3104 inserted there.
3106 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3107 this block, because if it did, it would have existed in our dominator's
3108 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3111 bitmap_insert_into_set (PHI_GEN (block), newphi);
3112 bitmap_value_replace_in_set (AVAIL_OUT (block),
3113 newphi);
3114 bitmap_insert_into_set (NEW_SETS (block),
3115 newphi);
3117 /* If we insert a PHI node for a conversion of another PHI node
3118 in the same basic block, try to preserve range information.
3119 This is important so that follow-up loop passes receive optimal
3120 number-of-iterations analysis results. See PR61743. */
3121 if (expr->kind == NARY
3122 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3123 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3124 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3125 && INTEGRAL_TYPE_P (type)
3126 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3127 && (TYPE_PRECISION (type)
3128 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3129 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3131 wide_int min, max;
3132 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3133 && !wi::neg_p (min, SIGNED)
3134 && !wi::neg_p (max, SIGNED))
3135 /* Just handle extension and sign-changes of all-positive ranges. */
3136 set_range_info (temp,
3137 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3138 wide_int_storage::from (min, TYPE_PRECISION (type),
3139 TYPE_SIGN (type)),
3140 wide_int_storage::from (max, TYPE_PRECISION (type),
3141 TYPE_SIGN (type)));
3144 if (dump_file && (dump_flags & TDF_DETAILS))
3146 fprintf (dump_file, "Created phi ");
3147 print_gimple_stmt (dump_file, phi, 0);
3148 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3150 pre_stats.phis++;
3151 return true;
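/* Example (illustrative): if AVAIL maps the two incoming edges of
   BLOCK to the name t_3 and the constant 4 for a value V, the result
   is roughly

     prephitmp_7 = PHI <t_3(0), 4(1)>

   and prephitmp_7 becomes the leader of V in AVAIL_OUT (BLOCK).  */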
3156 /* Perform insertion of partially redundant or hoistable values.
3157 For BLOCK, do the following:
3158 1. Propagate the NEW_SETS of the dominator into the current block.
3159 If the block has multiple predecessors,
3160 2a. Iterate over the ANTIC expressions for the block to see if
3161 any of them are partially redundant.
3162 2b. If so, insert them into the necessary predecessors to make
3163 the expression fully redundant.
3164 2c. Insert a new PHI merging the values of the predecessors.
3165 2d. Insert the new PHI, and the new expressions, into the
3166 NEW_SETS set.
3167 If the block has multiple successors,
3168 3a. Iterate over the ANTIC values for the block to see if
3169 any of them are good candidates for hoisting.
3170 3b. If so, insert expressions computing the values in BLOCK,
3171 and add the new expressions into the NEW_SETS set.
3172 4. Recursively call ourselves on the dominator children of BLOCK.
3174 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3175 do_pre_regular_insertion and do_pre_partial_partial_insertion. 3a and 3b are
3176 done in do_hoist_insertion.
3179 static bool
3180 do_pre_regular_insertion (basic_block block, basic_block dom)
3182 bool new_stuff = false;
3183 vec<pre_expr> exprs;
3184 pre_expr expr;
3185 auto_vec<pre_expr> avail;
3186 int i;
3188 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3189 avail.safe_grow (EDGE_COUNT (block->preds));
3191 FOR_EACH_VEC_ELT (exprs, i, expr)
3193 if (expr->kind == NARY
3194 || expr->kind == REFERENCE)
3196 unsigned int val;
3197 bool by_some = false;
3198 bool cant_insert = false;
3199 bool all_same = true;
3200 pre_expr first_s = NULL;
3201 edge pred;
3202 basic_block bprime;
3203 pre_expr eprime = NULL;
3204 edge_iterator ei;
3205 pre_expr edoubleprime = NULL;
3206 bool do_insertion = false;
3208 val = get_expr_value_id (expr);
3209 if (bitmap_set_contains_value (PHI_GEN (block), val))
3210 continue;
3211 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3213 if (dump_file && (dump_flags & TDF_DETAILS))
3215 fprintf (dump_file, "Found fully redundant value: ");
3216 print_pre_expr (dump_file, expr);
3217 fprintf (dump_file, "\n");
3219 continue;
3222 FOR_EACH_EDGE (pred, ei, block->preds)
3224 unsigned int vprime;
3226 /* We should never run insertion for the exit block
3227 and so not come across fake pred edges. */
3228 gcc_assert (!(pred->flags & EDGE_FAKE));
3229 bprime = pred->src;
3230 /* We are looking at ANTIC_OUT of bprime. */
3231 eprime = phi_translate (NULL, expr, ANTIC_IN (block), NULL, pred);
3233 /* eprime will generally only be NULL if the
3234 value of the expression, translated
3235 through the PHI for this predecessor, is
3236 undefined. If that is the case, we can't
3237 make the expression fully redundant,
3238 because its value is undefined along a
3239 predecessor path. We can thus break out
3240 early because it doesn't matter what the
3241 rest of the results are. */
3242 if (eprime == NULL)
3244 avail[pred->dest_idx] = NULL;
3245 cant_insert = true;
3246 break;
3249 vprime = get_expr_value_id (eprime);
3250 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3251 vprime);
3252 if (edoubleprime == NULL)
3254 avail[pred->dest_idx] = eprime;
3255 all_same = false;
3257 else
3259 avail[pred->dest_idx] = edoubleprime;
3260 by_some = true;
3261 /* We want to perform insertions to remove a redundancy on
3262 a path in the CFG we want to optimize for speed. */
3263 if (optimize_edge_for_speed_p (pred))
3264 do_insertion = true;
3265 if (first_s == NULL)
3266 first_s = edoubleprime;
3267 else if (!pre_expr_d::equal (first_s, edoubleprime))
3268 all_same = false;
3271 /* If we can insert it, it is not the same value
3272 already existing along every predecessor, and
3273 it is defined by some predecessor, then it is
3274 partially redundant. */
3275 if (!cant_insert && !all_same && by_some)
3277 if (!do_insertion)
3279 if (dump_file && (dump_flags & TDF_DETAILS))
3281 fprintf (dump_file, "Skipping partial redundancy for "
3282 "expression ");
3283 print_pre_expr (dump_file, expr);
3284 fprintf (dump_file, " (%04d), no redundancy on to be "
3285 "optimized for speed edge\n", val);
3288 else if (dbg_cnt (treepre_insert))
3290 if (dump_file && (dump_flags & TDF_DETAILS))
3292 fprintf (dump_file, "Found partial redundancy for "
3293 "expression ");
3294 print_pre_expr (dump_file, expr);
3295 fprintf (dump_file, " (%04d)\n",
3296 get_expr_value_id (expr));
3298 if (insert_into_preds_of_block (block,
3299 get_expression_id (expr),
3300 avail))
3301 new_stuff = true;
3304 /* If all edges produce the same value and that value is
3305 an invariant, then the PHI has the same value on all
3306 edges. Note this. */
3307 else if (!cant_insert && all_same)
3309 gcc_assert (edoubleprime->kind == CONSTANT
3310 || edoubleprime->kind == NAME);
3312 tree temp = make_temp_ssa_name (get_expr_type (expr),
3313 NULL, "pretmp");
3314 gassign *assign
3315 = gimple_build_assign (temp,
3316 edoubleprime->kind == CONSTANT ?
3317 PRE_EXPR_CONSTANT (edoubleprime) :
3318 PRE_EXPR_NAME (edoubleprime));
3319 gimple_stmt_iterator gsi = gsi_after_labels (block);
3320 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3322 VN_INFO (temp)->value_id = val;
3323 VN_INFO (temp)->valnum = vn_valnum_from_value_id (val);
3324 if (VN_INFO (temp)->valnum == NULL_TREE)
3325 VN_INFO (temp)->valnum = temp;
3326 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3327 pre_expr newe = get_or_alloc_expr_for_name (temp);
3328 add_to_value (val, newe);
3329 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3330 bitmap_insert_into_set (NEW_SETS (block), newe);
3335 exprs.release ();
3336 return new_stuff;
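/* Source-level illustration (hypothetical) of the transformation:

     if (p_1)
       x_2 = a_3 + b_4;   <- a + b available on this arm only
     else
       x_5 = 0;
     # x_6 = PHI <x_2, x_5>
     y_7 = a_3 + b_4;     <- partially redundant

   The value of a + b has a leader only along the then-arm, so the code
   above inserts a_3 + b_4 on the else-arm and merges the two with a
   prephitmp PHI; elimination can then replace the computation of y_7
   by the PHI result.  */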
3340 /* Perform insertion for partially anticipatable expressions. There
3341 is only one case in which we will perform insertion for these: when
3342 the expression is partially anticipatable and fully available.
3343 In this case, we know that putting it earlier will enable us to
3344 remove the later computation. */
3346 static bool
3347 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3349 bool new_stuff = false;
3350 vec<pre_expr> exprs;
3351 pre_expr expr;
3352 auto_vec<pre_expr> avail;
3353 int i;
3355 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3356 avail.safe_grow (EDGE_COUNT (block->preds));
3358 FOR_EACH_VEC_ELT (exprs, i, expr)
3360 if (expr->kind == NARY
3361 || expr->kind == REFERENCE)
3363 unsigned int val;
3364 bool by_all = true;
3365 bool cant_insert = false;
3366 edge pred;
3367 basic_block bprime;
3368 pre_expr eprime = NULL;
3369 edge_iterator ei;
3371 val = get_expr_value_id (expr);
3372 if (bitmap_set_contains_value (PHI_GEN (block), val))
3373 continue;
3374 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3375 continue;
3377 FOR_EACH_EDGE (pred, ei, block->preds)
3379 unsigned int vprime;
3380 pre_expr edoubleprime;
3382 /* We should never run insertion for the exit block
3383 and so not come across fake pred edges. */
3384 gcc_assert (!(pred->flags & EDGE_FAKE));
3385 bprime = pred->src;
3386 eprime = phi_translate (NULL, expr, ANTIC_IN (block),
3387 PA_IN (block), pred);
3389 /* eprime will generally only be NULL if the
3390 value of the expression, translated
3391 through the PHI for this predecessor, is
3392 undefined. If that is the case, we can't
3393 make the expression fully redundant,
3394 because its value is undefined along a
3395 predecessor path. We can thus break out
3396 early because it doesn't matter what the
3397 rest of the results are. */
3398 if (eprime == NULL)
3400 avail[pred->dest_idx] = NULL;
3401 cant_insert = true;
3402 break;
3405 vprime = get_expr_value_id (eprime);
3406 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3407 avail[pred->dest_idx] = edoubleprime;
3408 if (edoubleprime == NULL)
3410 by_all = false;
3411 break;
3415 /* If we can insert it and it is defined along every
3416 predecessor, the expression is fully available there and
3417 partially anticipatable here, so inserting it earlier
3418 will let us remove the later computation. */
3419 if (!cant_insert && by_all)
3421 edge succ;
3422 bool do_insertion = false;
3424 /* Insert only if we can remove a later expression on a path
3425 that we want to optimize for speed.
3426 The phi node that we will be inserting in BLOCK is not free,
3427 and inserting it for the sake of a !optimize_for_speed successor
3428 may cause regressions on the speed path. */
3429 FOR_EACH_EDGE (succ, ei, block->succs)
3431 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3432 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3434 if (optimize_edge_for_speed_p (succ))
3435 do_insertion = true;
3439 if (!do_insertion)
3441 if (dump_file && (dump_flags & TDF_DETAILS))
3443 fprintf (dump_file, "Skipping partial partial redundancy "
3444 "for expression ");
3445 print_pre_expr (dump_file, expr);
3446 fprintf (dump_file, " (%04d), not (partially) anticipated "
3447 "on any to be optimized for speed edges\n", val);
3450 else if (dbg_cnt (treepre_insert))
3452 pre_stats.pa_insert++;
3453 if (dump_file && (dump_flags & TDF_DETAILS))
3455 fprintf (dump_file, "Found partial partial redundancy "
3456 "for expression ");
3457 print_pre_expr (dump_file, expr);
3458 fprintf (dump_file, " (%04d)\n",
3459 get_expr_value_id (expr));
3461 if (insert_into_preds_of_block (block,
3462 get_expression_id (expr),
3463 avail))
3464 new_stuff = true;
3470 exprs.release ();
3471 return new_stuff;
3474 /* Insert expressions in BLOCK to compute hoistable values up.
3475 Return TRUE if something was inserted, otherwise return FALSE.
3476 The caller has to make sure that BLOCK has at least two successors. */
3478 static bool
3479 do_hoist_insertion (basic_block block)
3481 edge e;
3482 edge_iterator ei;
3483 bool new_stuff = false;
3484 unsigned i;
3485 gimple_stmt_iterator last;
3487 /* At least two successors, or else... */
3488 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3490 /* Check that all successors of BLOCK are dominated by BLOCK.
3491 We could use dominated_by_p() for this, but actually there is a much
3492 quicker check: any successor that is dominated by BLOCK can't have
3493 more than one predecessor edge. */
3494 FOR_EACH_EDGE (e, ei, block->succs)
3495 if (! single_pred_p (e->dest))
3496 return false;
3498 /* Determine the insertion point. If we would have to insert before
3499 the last stmt and cannot do so safely, bail out. */
3500 last = gsi_last_bb (block);
3501 if (!gsi_end_p (last)
3502 && !is_ctrl_stmt (gsi_stmt (last))
3503 && stmt_ends_bb_p (gsi_stmt (last)))
3504 return false;
3506 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3507 hoistable values. */
3508 bitmap_set hoistable_set;
3510 /* A hoistable value must be in ANTIC_IN (BLOCK)
3511 but not in AVAIL_OUT (BLOCK). */
3512 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3513 bitmap_and_compl (&hoistable_set.values,
3514 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3516 /* Short-cut for a common case: hoistable_set is empty. */
3517 if (bitmap_empty_p (&hoistable_set.values))
3518 return false;
3520 /* Compute which of the hoistable values is in AVAIL_OUT of
3521 at least one of the successors of BLOCK. */
3522 bitmap_head availout_in_some;
3523 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3524 FOR_EACH_EDGE (e, ei, block->succs)
3525 /* Do not consider expressions solely because of their availability
3526 on loop exits. They'd be ANTIC_IN throughout the whole loop
3527 and thus effectively hoisted across loops by the combination of
3528 PRE and hoisting. */
3529 if (! loop_exit_edge_p (block->loop_father, e))
3530 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3531 &AVAIL_OUT (e->dest)->values);
3532 bitmap_clear (&hoistable_set.values);
3534 /* Short-cut for a common case: availout_in_some is empty. */
3535 if (bitmap_empty_p (&availout_in_some))
3536 return false;
3538 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3539 hoistable_set.values = availout_in_some;
3540 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3542 /* Now finally construct the topologically-ordered expression set. */
3543 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3545 bitmap_clear (&hoistable_set.values);
3547 /* If there are candidate values for hoisting, insert expressions
3548 strategically to make the hoistable expressions fully redundant. */
3549 pre_expr expr;
3550 FOR_EACH_VEC_ELT (exprs, i, expr)
3552 /* While we try to sort expressions topologically above, the
3553 sorting doesn't work out perfectly. Catch expressions we
3554 have already inserted. */
3555 unsigned int value_id = get_expr_value_id (expr);
3556 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3558 if (dump_file && (dump_flags & TDF_DETAILS))
3560 fprintf (dump_file,
3561 "Already inserted expression for ");
3562 print_pre_expr (dump_file, expr);
3563 fprintf (dump_file, " (%04d)\n", value_id);
3565 continue;
3568 /* OK, we should hoist this value. Perform the transformation. */
3569 pre_stats.hoist_insert++;
3570 if (dump_file && (dump_flags & TDF_DETAILS))
3572 fprintf (dump_file,
3573 "Inserting expression in block %d for code hoisting: ",
3574 block->index);
3575 print_pre_expr (dump_file, expr);
3576 fprintf (dump_file, " (%04d)\n", value_id);
3579 gimple_seq stmts = NULL;
3580 tree res = create_expression_by_pieces (block, expr, &stmts,
3581 get_expr_type (expr));
3583 /* Do not return true if expression creation ultimately
3584 did not insert any statements. */
3585 if (gimple_seq_empty_p (stmts))
3586 res = NULL_TREE;
3587 else
3589 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3590 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3591 else
3592 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3595 /* Make sure not to return true if expression creation ultimately
3596 failed, but do insert any stmts produced, as they
3597 are tracked in inserted_exprs. */
3598 if (! res)
3599 continue;
3601 new_stuff = true;
3604 exprs.release ();
3606 return new_stuff;
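/* Illustration (hypothetical): with both arms computing the same
   value,

     if (p_1)
       x_2 = a_3 + b_4;
     else
       y_5 = a_3 + b_4;

   the value of a + b is in ANTIC_IN of the block ending in the branch,
   not yet in its AVAIL_OUT, and in AVAIL_OUT of both
   single-predecessor successors, so a pretmp computing a_3 + b_4 is
   inserted before the branch and both arms become fully redundant.  */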
3609 /* Do a dominator walk on the control flow graph, and insert computations
3610 of values as necessary for PRE and hoisting. */
3612 static bool
3613 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3615 basic_block son;
3616 bool new_stuff = false;
3618 if (block)
3620 basic_block dom;
3621 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3622 if (dom)
3624 unsigned i;
3625 bitmap_iterator bi;
3626 bitmap_set_t newset;
3628 /* First, update the AVAIL_OUT set with anything we may have
3629 inserted higher up in the dominator tree. */
3630 newset = NEW_SETS (dom);
3631 if (newset)
3633 /* Note that we need to value_replace both NEW_SETS and
3634 AVAIL_OUT. In the case of NEW_SETS, the value may be
3635 represented by some non-simple expression here that we want
3636 to replace. */
3637 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3639 pre_expr expr = expression_for_id (i);
3640 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3641 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3645 /* Insert expressions for partial redundancies. */
3646 if (do_pre && !single_pred_p (block))
3648 new_stuff |= do_pre_regular_insertion (block, dom);
3649 if (do_partial_partial)
3650 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3653 /* Insert expressions for hoisting. */
3654 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3655 new_stuff |= do_hoist_insertion (block);
3658 for (son = first_dom_son (CDI_DOMINATORS, block);
3659 son;
3660 son = next_dom_son (CDI_DOMINATORS, son))
3662 new_stuff |= insert_aux (son, do_pre, do_hoist);
3665 return new_stuff;
3668 /* Perform insertion of partially redundant and hoistable values. */
3670 static void
3671 insert (void)
3673 bool new_stuff = true;
3674 basic_block bb;
3675 int num_iterations = 0;
3677 FOR_ALL_BB_FN (bb, cfun)
3678 NEW_SETS (bb) = bitmap_set_new ();
3680 while (new_stuff)
3682 num_iterations++;
3683 if (dump_file && (dump_flags & TDF_DETAILS))
3684 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3685 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3686 flag_code_hoisting);
3688 /* Clear the NEW sets before the next iteration. We have already
3689 fully propagated their contents. */
3690 if (new_stuff)
3691 FOR_ALL_BB_FN (bb, cfun)
3692 bitmap_set_free (NEW_SETS (bb));
3694 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3698 /* Compute the AVAIL set for all basic blocks.
3700 This function performs value numbering of the statements in each basic
3701 block. The AVAIL sets are built from information we glean while doing
3702 this value numbering, since the AVAIL sets contain only one entry per
3703 value.
3705 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3706 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
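/* For example (illustrative): if B2's immediate dominator B1 computes
   t_1 = a_0 + 1, then TMP_GEN (B1) contains t_1, AVAIL_OUT (B1) gains
   a leader for the value of a_0 + 1, and B2 starts from a copy of
   AVAIL_OUT (B1), so the same leader is visible there.  */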
3708 static void
3709 compute_avail (void)
3712 basic_block block, son;
3713 basic_block *worklist;
3714 size_t sp = 0;
3715 unsigned i;
3716 tree name;
3718 /* We pretend that default definitions are defined in the entry block.
3719 This includes function arguments and the static chain decl. */
3720 FOR_EACH_SSA_NAME (i, name, cfun)
3722 pre_expr e;
3723 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3724 || has_zero_uses (name)
3725 || virtual_operand_p (name))
3726 continue;
3728 e = get_or_alloc_expr_for_name (name);
3729 add_to_value (get_expr_value_id (e), e);
3730 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3731 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3735 if (dump_file && (dump_flags & TDF_DETAILS))
3737 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3738 "tmp_gen", ENTRY_BLOCK);
3739 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3740 "avail_out", ENTRY_BLOCK);
3743 /* Allocate the worklist. */
3744 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3746 /* Seed the algorithm by putting the dominator children of the entry
3747 block on the worklist. */
3748 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3749 son;
3750 son = next_dom_son (CDI_DOMINATORS, son))
3751 worklist[sp++] = son;
3753 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3754 = ssa_default_def (cfun, gimple_vop (cfun));
3756 /* Loop until the worklist is empty. */
3757 while (sp)
3759 gimple *stmt;
3760 basic_block dom;
3762 /* Pick a block from the worklist. */
3763 block = worklist[--sp];
3764 vn_context_bb = block;
3766 /* Initially, the set of available values in BLOCK is that of
3767 its immediate dominator. */
3768 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3769 if (dom)
3771 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3772 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3775 /* Generate values for PHI nodes. */
3776 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3777 gsi_next (&gsi))
3779 tree result = gimple_phi_result (gsi.phi ());
3781 /* We have no need for virtual phis, as they don't represent
3782 actual computations. */
3783 if (virtual_operand_p (result))
3785 BB_LIVE_VOP_ON_EXIT (block) = result;
3786 continue;
3789 pre_expr e = get_or_alloc_expr_for_name (result);
3790 add_to_value (get_expr_value_id (e), e);
3791 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3792 bitmap_insert_into_set (PHI_GEN (block), e);
3795 BB_MAY_NOTRETURN (block) = 0;
3797 /* Now compute value numbers and populate value sets with all
3798 the expressions computed in BLOCK. */
3799 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3800 gsi_next (&gsi))
3802 ssa_op_iter iter;
3803 tree op;
3805 stmt = gsi_stmt (gsi);
3807 /* Cache whether the basic-block has any non-visible side-effect
3808 or control flow.
3809 If this isn't a call or it is the last stmt in the
3810 basic-block, then the CFG represents things correctly. */
3811 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3813 /* Non-looping const functions always return normally.
3814 Otherwise the call might not return or have side-effects
3815 that forbid hoisting possibly trapping expressions
3816 before it. */
3817 int flags = gimple_call_flags (stmt);
3818 if (!(flags & ECF_CONST)
3819 || (flags & ECF_LOOPING_CONST_OR_PURE))
3820 BB_MAY_NOTRETURN (block) = 1;
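 /* A hypothetical example of what this guards against:

 foo (); <- not const, may loop forever or call exit ()
 t = x / y; <- possibly trapping division

 hoisting the division above the call could introduce a trap on a
 path where it was never executed, so BB_MAY_NOTRETURN is checked
 before such NARYs are added to EXP_GEN below. */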
3823 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3825 pre_expr e = get_or_alloc_expr_for_name (op);
3827 add_to_value (get_expr_value_id (e), e);
3828 bitmap_insert_into_set (TMP_GEN (block), e);
3829 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3832 if (gimple_vdef (stmt))
3833 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3835 if (gimple_has_side_effects (stmt)
3836 || stmt_could_throw_p (stmt)
3837 || is_gimple_debug (stmt))
3838 continue;
3840 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3842 if (ssa_undefined_value_p (op))
3843 continue;
3844 pre_expr e = get_or_alloc_expr_for_name (op);
3845 bitmap_value_insert_into_set (EXP_GEN (block), e);
3848 switch (gimple_code (stmt))
3850 case GIMPLE_RETURN:
3851 continue;
3853 case GIMPLE_CALL:
3855 vn_reference_t ref;
3856 vn_reference_s ref1;
3857 pre_expr result = NULL;
3859 /* We can value number only calls to real functions. */
3860 if (gimple_call_internal_p (stmt))
3861 continue;
3863 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3864 if (!ref)
3865 continue;
3867 /* If the value of the call is not invalidated in
3868 this block until it is computed, add the expression
3869 to EXP_GEN. */
3870 if (!gimple_vuse (stmt)
3871 || gimple_code
3872 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3873 || gimple_bb (SSA_NAME_DEF_STMT
3874 (gimple_vuse (stmt))) != block)
3876 result = pre_expr_pool.allocate ();
3877 result->kind = REFERENCE;
3878 result->id = 0;
3879 PRE_EXPR_REFERENCE (result) = ref;
3881 get_or_alloc_expression_id (result);
3882 add_to_value (get_expr_value_id (result), result);
3883 bitmap_value_insert_into_set (EXP_GEN (block), result);
3885 continue;
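 /* E.g. (an illustrative sketch): for

 t1 = bar (i);
 ...no intervening clobber of bar's inputs...
 t2 = bar (i);

 both calls receive the same value number; the first one lands in
 EXP_GEN because its value is not invalidated between the block
 start and the call, so the second can later be eliminated in
 favor of t1. */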
3888 case GIMPLE_ASSIGN:
3890 pre_expr result = NULL;
3891 switch (vn_get_stmt_kind (stmt))
3893 case VN_NARY:
3895 enum tree_code code = gimple_assign_rhs_code (stmt);
3896 vn_nary_op_t nary;
3898 /* COND_EXPR and VEC_COND_EXPR are awkward in
3899 that they contain an embedded complex expression.
3900 Don't even try to shove those through PRE. */
3901 if (code == COND_EXPR
3902 || code == VEC_COND_EXPR)
3903 continue;
3905 vn_nary_op_lookup_stmt (stmt, &nary);
3906 if (!nary || nary->predicated_values)
3907 continue;
3909 /* If the NARY traps and there was a preceding
3910 point in the block that might not return, avoid
3911 adding the nary to EXP_GEN. */
3912 if (BB_MAY_NOTRETURN (block)
3913 && vn_nary_may_trap (nary))
3914 continue;
3916 result = pre_expr_pool.allocate ();
3917 result->kind = NARY;
3918 result->id = 0;
3919 PRE_EXPR_NARY (result) = nary;
3920 break;
3923 case VN_REFERENCE:
3925 tree rhs1 = gimple_assign_rhs1 (stmt);
3926 alias_set_type set = get_alias_set (rhs1);
3927 vec<vn_reference_op_s> operands
3928 = vn_reference_operands_for_lookup (rhs1);
3929 vn_reference_t ref;
3930 vn_reference_lookup_pieces (gimple_vuse (stmt), set,
3931 TREE_TYPE (rhs1),
3932 operands, &ref, VN_WALK);
3933 if (!ref)
3935 operands.release ();
3936 continue;
3939 /* If the value of the reference is not invalidated in
3940 this block until it is computed, add the expression
3941 to EXP_GEN. */
3942 if (gimple_vuse (stmt))
3944 gimple *def_stmt;
3945 bool ok = true;
3946 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3947 while (!gimple_nop_p (def_stmt)
3948 && gimple_code (def_stmt) != GIMPLE_PHI
3949 && gimple_bb (def_stmt) == block)
3951 if (stmt_may_clobber_ref_p
3952 (def_stmt, gimple_assign_rhs1 (stmt)))
3954 ok = false;
3955 break;
3957 def_stmt
3958 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3960 if (!ok)
3962 operands.release ();
3963 continue;
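 /* The walk above rejects cases like this (hypothetical):

 *q = 1; <- may clobber *p
 t = *p;

 the load's value then is not the value *p had on block entry, so
 its expression must not enter EXP_GEN, which describes values
 anticipatable from the start of the block. */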
3967 /* If the load was value-numbered to another
3968 load, make sure we do not use its expression
3969 for insertion if it wouldn't be a valid
3970 replacement. */
3971 /* At the moment we have a testcase
3972 for hoist insertion of aligned vs. misaligned
3973 variants in gcc.dg/torture/pr65270-1.c, so
3974 with just alignment to consider we can
3975 simply replace the expression in the hashtable
3976 with the most conservative one. */
3977 vn_reference_op_t ref1 = &ref->operands.last ();
3978 while (ref1->opcode != TARGET_MEM_REF
3979 && ref1->opcode != MEM_REF
3980 && ref1 != &ref->operands[0])
3981 --ref1;
3982 vn_reference_op_t ref2 = &operands.last ();
3983 while (ref2->opcode != TARGET_MEM_REF
3984 && ref2->opcode != MEM_REF
3985 && ref2 != &operands[0])
3986 --ref2;
3987 if ((ref1->opcode == TARGET_MEM_REF
3988 || ref1->opcode == MEM_REF)
3989 && (TYPE_ALIGN (ref1->type)
3990 > TYPE_ALIGN (ref2->type)))
3991 ref1->type
3992 = build_aligned_type (ref1->type,
3993 TYPE_ALIGN (ref2->type));
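 /* Illustratively, with a hypothetical over-aligned variant

 typedef int aint __attribute__ ((aligned (8)));
 x = *(aint *) p; vs. y = *(int *) p;

 both loads can value-number the same, but only the less aligned
 access type is valid at either site, so the hashtable expression
 is demoted to that alignment before it is used for insertion. */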
3994 /* TBAA behavior is an obvious part, so make sure
3995 that the hashtable one covers this as well
3996 by adjusting the ref alias set and its base. */
3997 if (ref->set == set
3998 || alias_set_subset_of (set, ref->set))
4000 else if (alias_set_subset_of (ref->set, set))
4002 ref->set = set;
4003 if (ref1->opcode == MEM_REF)
4004 ref1->op0
4005 = wide_int_to_tree (TREE_TYPE (ref2->op0),
4006 wi::to_wide (ref1->op0));
4007 else
4008 ref1->op2
4009 = wide_int_to_tree (TREE_TYPE (ref2->op2),
4010 wi::to_wide (ref1->op2));
4012 else
4014 ref->set = 0;
4015 if (ref1->opcode == MEM_REF)
4016 ref1->op0
4017 = wide_int_to_tree (ptr_type_node,
4018 wi::to_wide (ref1->op0));
4019 else
4020 ref1->op2
4021 = wide_int_to_tree (ptr_type_node,
4022 wi::to_wide (ref1->op2));
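 /* Summarizing the three cases above (descriptive only): if the
 current access's alias set equals or is a subset of the hashtable
 ref's set, the entry already covers it and the if branch
 intentionally does nothing; if instead the hashtable set is the
 subset, the entry is widened to the current set; and with no
 subset relation in either direction only alias set 0 (aliasing
 everything, hence the ptr_type_node base) is conservative. */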
4024 operands.release ();
4026 result = pre_expr_pool.allocate ();
4027 result->kind = REFERENCE;
4028 result->id = 0;
4029 PRE_EXPR_REFERENCE (result) = ref;
4030 break;
4033 default:
4034 continue;
4037 get_or_alloc_expression_id (result);
4038 add_to_value (get_expr_value_id (result), result);
4039 bitmap_value_insert_into_set (EXP_GEN (block), result);
4040 continue;
4042 default:
4043 break;
4047 if (dump_file && (dump_flags & TDF_DETAILS))
4049 print_bitmap_set (dump_file, EXP_GEN (block),
4050 "exp_gen", block->index);
4051 print_bitmap_set (dump_file, PHI_GEN (block),
4052 "phi_gen", block->index);
4053 print_bitmap_set (dump_file, TMP_GEN (block),
4054 "tmp_gen", block->index);
4055 print_bitmap_set (dump_file, AVAIL_OUT (block),
4056 "avail_out", block->index);
4059 /* Put the dominator children of BLOCK on the worklist of blocks
4060 to compute available sets for. */
4061 for (son = first_dom_son (CDI_DOMINATORS, block);
4062 son;
4063 son = next_dom_son (CDI_DOMINATORS, son))
4064 worklist[sp++] = son;
4066 vn_context_bb = NULL;
4068 free (worklist);
4072 /* Initialize data structures used by PRE. */
4074 static void
4075 init_pre (void)
4077 basic_block bb;
4079 next_expression_id = 1;
4080 expressions.create (0);
4081 expressions.safe_push (NULL);
4082 value_expressions.create (get_max_value_id () + 1);
4083 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4084 name_to_id.create (0);
4086 inserted_exprs = BITMAP_ALLOC (NULL);
4088 connect_infinite_loops_to_exit ();
4089 memset (&pre_stats, 0, sizeof (pre_stats));
4091 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4093 calculate_dominance_info (CDI_DOMINATORS);
4095 bitmap_obstack_initialize (&grand_bitmap_obstack);
4096 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4097 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4098 FOR_ALL_BB_FN (bb, cfun)
4100 EXP_GEN (bb) = bitmap_set_new ();
4101 PHI_GEN (bb) = bitmap_set_new ();
4102 TMP_GEN (bb) = bitmap_set_new ();
4103 AVAIL_OUT (bb) = bitmap_set_new ();
4108 /* Deallocate data structures used by PRE. */
4110 static void
4111 fini_pre ()
4113 value_expressions.release ();
4114 expressions.release ();
4115 BITMAP_FREE (inserted_exprs);
4116 bitmap_obstack_release (&grand_bitmap_obstack);
4117 bitmap_set_pool.release ();
4118 pre_expr_pool.release ();
4119 delete phi_translate_table;
4120 phi_translate_table = NULL;
4121 delete expression_to_id;
4122 expression_to_id = NULL;
4123 name_to_id.release ();
4125 free_aux_for_blocks ();
4128 namespace {
4130 const pass_data pass_data_pre =
4132 GIMPLE_PASS, /* type */
4133 "pre", /* name */
4134 OPTGROUP_NONE, /* optinfo_flags */
4135 TV_TREE_PRE, /* tv_id */
4136 ( PROP_cfg | PROP_ssa ), /* properties_required */
4137 0, /* properties_provided */
4138 0, /* properties_destroyed */
4139 TODO_rebuild_alias, /* todo_flags_start */
4140 0, /* todo_flags_finish */
4143 class pass_pre : public gimple_opt_pass
4145 public:
4146 pass_pre (gcc::context *ctxt)
4147 : gimple_opt_pass (pass_data_pre, ctxt)
4150 /* opt_pass methods: */
4151 virtual bool gate (function *)
4152 { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
4153 virtual unsigned int execute (function *);
4155 }; // class pass_pre
4157 /* Valueization hook for RPO VN when we are calling back to it
4158 at ANTIC compute time. */
4160 static tree
4161 pre_valueize (tree name)
4163 if (TREE_CODE (name) == SSA_NAME)
4165 tree tem = VN_INFO (name)->valnum;
4166 if (tem != VN_TOP && tem != name)
4168 if (TREE_CODE (tem) != SSA_NAME
4169 || SSA_NAME_IS_DEFAULT_DEF (tem))
4170 return tem;
4171 /* We create temporary SSA names for representatives that
4172 do not have a definition (yet) but are not default defs either;
4173 assume they are fine to use. */
4174 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (tem));
4175 if (! def_bb
4176 || dominated_by_p (CDI_DOMINATORS, vn_context_bb, def_bb))
4177 return tem;
4178 /* ??? Now we could look for a leader. Ideally we'd somehow
4179 expose RPO VN leaders and get rid of AVAIL_OUT as well... */
4182 return name;
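 /* For instance (illustrative): if x_3 was value-numbered to the
 constant 5, this hook returns 5 and the ANTIC computation can
 simplify expressions over x_3 accordingly; a non-constant
 representative is only usable when its definition dominates
 vn_context_bb, otherwise the original name is kept. */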
4185 unsigned int
4186 pass_pre::execute (function *fun)
4188 unsigned int todo = 0;
4190 do_partial_partial =
4191 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4193 /* This has to happen before VN runs because
4194 loop_optimizer_init may create new phis, etc. */
4195 loop_optimizer_init (LOOPS_NORMAL);
4196 split_critical_edges ();
4197 scev_initialize ();
4199 run_rpo_vn (VN_WALK);
4201 init_pre ();
4203 vn_valueize = pre_valueize;
4205 /* Insert can get quite slow on an incredibly large number of basic
4206 blocks due to some quadratic behavior. Until this behavior is
4207 fixed, don't run it when we have an incredibly large number of
4208 bb's. If we aren't going to run insert, there is no point in
4209 computing ANTIC either, even though it's plenty fast, nor do
4210 we require AVAIL. */
4211 if (n_basic_blocks_for_fn (fun) < 4000)
4213 compute_avail ();
4214 compute_antic ();
4215 insert ();
4218 /* Make sure to remove fake edges before committing our inserts.
4219 This makes sure we don't end up with extra critical edges that
4220 we would need to split. */
4221 remove_fake_exit_edges ();
4222 gsi_commit_edge_inserts ();
4224 /* Elimination folds statements, which might (should not...) end up
4225 not keeping virtual operands up-to-date. */
4226 gcc_assert (!need_ssa_update_p (fun));
4228 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4229 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4230 statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
4231 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4233 todo |= eliminate_with_rpo_vn (inserted_exprs);
4235 vn_valueize = NULL;
4237 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4238 to insert PHI nodes sometimes, and because value numbering of casts isn't
4239 perfect, we sometimes end up inserting dead code. This simple DCE-like
4240 pass removes any insertions we made that weren't actually used. */
4241 simple_dce_from_worklist (inserted_exprs);
4243 fini_pre ();
4245 scev_finalize ();
4246 loop_optimizer_finalize ();
4248 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4249 case we can merge the block with the remaining predecessor of the block.
4250 It should either:
4251 - call merge_blocks after each tail merge iteration
4252 - call merge_blocks after all tail merge iterations
4253 - mark TODO_cleanup_cfg when necessary
4254 - share the cfg cleanup with fini_pre. */
4255 todo |= tail_merge_optimize (todo);
4257 free_rpo_vn ();
4259 /* Tail merging invalidates the virtual SSA web; together with
4260 cfg-cleanup opportunities exposed by PRE this would wreck the
4261 SSA updating machinery. So make sure to run update-ssa
4262 manually, before eventually scheduling cfg-cleanup as part of
4263 the todo. */
4264 update_ssa (TODO_update_ssa_only_virtuals);
4266 return todo;
4269 } // anon namespace
4271 gimple_opt_pass *
4272 make_pass_pre (gcc::context *ctxt)
4274 return new pass_pre (ctxt);