/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
        This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
        This adds computation of AVAIL_OUT and ANTIC_IN and does
        expression insertion to form GVN-PRE.

     3. Code hoisting
        This optimization uses the ANTIC_IN sets computed for PRE
        to move expressions further up than PRE would do, to make
        multiple computations of the same value fully redundant.
        This pass is explained below (after the explanation of the
        basic algorithm for PRE).  */
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining quadraticness in
      memory of GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

     1. It is AVAIL in some, but not all, of the predecessors of a
        given block.
     2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
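
/* As an illustration (a hypothetical GIMPLE fragment, not taken from
   any testcase), consider a join block with two predecessors where
   a_0 + b_0 is computed on only one incoming path:

       bb2: x_1 = a_0 + b_0;      bb3: <no computation of a_0 + b_0>
              \                         /
               bb4: y_2 = a_0 + b_0;    <- partially redundant

   The value of a_0 + b_0 is in AVAIL_OUT of bb2 but not of bb3, and in
   ANTIC_IN of bb4, so insertion emits pretmp = a_0 + b_0 in bb3 and a
   PHI prephitmp = PHI <x_1(bb2), pretmp(bb3)> in bb4; elimination then
   replaces the computation of y_2 by prephitmp, which is now fully
   redundant.  */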
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

     1. The value is in ANTIC_IN(B) -- the value will be computed on all
        paths from B to function exit and it can be computed in B;

     2. The value is not in AVAIL_OUT(B) -- there would be no need to
        compute the value again and make it available twice;

     3. All successors of B are dominated by B -- makes sure that inserting
        a computation of the value in B will make the remaining
        computations fully redundant;

     4. At least one successor has the value in AVAIL_OUT -- to avoid
        hoisting values up too far;

     5. There are at least two successors of B -- hoisting in straight
        line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insertion,
   driven by insert/insert_aux.  */
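
/* A sketch of the diamond shape these conditions target (hypothetical
   CFG, not from any testcase):

               bb2: if (cond_1)
              /               \
   bb3: x_1 = a_0 + b_0;    bb4: y_2 = a_0 + b_0;
              \               /
               bb5: ...

   In bb2 the value of a_0 + b_0 is in ANTIC_IN (it is computed on every
   path to exit and could be computed in bb2), it is not in AVAIL_OUT,
   both successors are dominated by bb2, at least one successor has the
   value in AVAIL_OUT, and there are two successors.  Hoisting therefore
   inserts pretmp = a_0 + b_0 at the end of bb2, after which the
   computations in bb3 and bb4 are fully redundant and are removed by
   elimination.  */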
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
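
/* For example (hypothetical numbers): if x_1 = a_0 + b_0 and later
   y_2 = a_0 + b_0, both names receive the same value number, with x_1
   acting as the representative SSA_NAME, and a small integer value_id
   (say 42) is associated with that value; the value_id, not the
   SSA_NAME, is what gets stored in the bitmap sets below.  */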
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
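
/* A small sketch with hypothetical ids: a set holding two expressions
   with expression-ids 3 and 7 that compute the same value with
   value-id 5 is stored as

     expressions bitmap: bits 3 and 7 set
     values bitmap:      bit 5 set

   so any number of expressions of one value share a single bit in the
   values bitmap, which keeps value membership tests (see
   bitmap_set_contains_value below) a single bit test.  */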
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
        return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
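
/* A hedged usage sketch (hypothetical loop body): iterating over the
   expressions of a set and recovering the pre_expr for each id is

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ...
       }

   which is the idiom used throughout the rest of this file.  */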
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)    ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)    ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)    ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)  ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)   ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)      ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)   ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)  ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
                          const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
                                             pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
        return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_expr_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  bitmap_clear_bit (&set->values, val);
  bitmap_clear_bit (&set->expressions, get_expression_id (expr));
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
         for the same value to appear in a set.  This is needed for
         TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            result.quick_push (expression_for_id (j));
        }
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  pre_expr to_remove = NULL;
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove)
        {
          bitmap_remove_expr_from_set (a, to_remove);
          to_remove = NULL;
        }
      pre_expr expr = expression_for_id (i);
      if (bitmap_bit_p (&b->values, get_expr_value_id (expr)))
        to_remove = expr;
    }
  if (to_remove)
    bitmap_remove_expr_from_set (a, to_remove);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
         significantly less than the total number of expressions in SET.
         Thus, rather than check, for each expression in SET, whether it
         has the value LOOKFOR, we walk the reverse mapping that tells us
         what expressions have a given value, and see if any of those
         expressions are in our set.  For large testcases, this is about
         5-10x faster than walking the bitmap.  If this is somehow a
         significant loss for some cases, we can choose which set to walk
         based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          if (bitmap_clear_bit (&set->expressions, i))
            {
              bitmap_set_bit (&set->expressions, get_expression_id (expr));
              return;
            }
        }
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i]);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             ref->operands.iterate (i, &vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != ref->operands.length () - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}

/* Return the folded version of T if T, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        tree res = vn_nary_simplify (nary);
        if (!res)
          return e;
        if (is_gimple_min_invariant (res))
          return get_or_alloc_expr_for_constant (res);
        if (TREE_CODE (res) == SSA_NAME)
          return get_or_alloc_expr_for_name (res);
        return e;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          unsigned int cnt;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
                                           NULL, NULL);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the
   union of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
                     bitmap_set_t set3 = NULL)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}

/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap exprs = value_expressions[value_id];
        EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return VN_INFO (PRE_EXPR_NAME (rep))->valnum;
            else if (rep->kind == CONSTANT)
              return PRE_EXPR_CONSTANT (rep);
          }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}


static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  newnary->op[i] = get_representative_for (result);
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            PRE_EXPR_NARY (expr) = newnary;
            constant = fully_constant_expression (expr);
            PRE_EXPR_NARY (expr) = nary;
            if (constant != expr)
              {
                /* For non-CONSTANTs we have to make sure we can eventually
                   insert the expression.  Which means we need to have a
                   leader for it.  */
                if (constant->kind != CONSTANT)
                  {
                    /* Do not allow simplifications to non-constants over
                       backedges as this will likely result in a loop PHI node
                       to be inserted and increased register pressure.
                       See PR77498 - this avoids doing predcoms work in
                       a less efficient way.  */
                    if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
                      ;
                    else
                      {
                        unsigned value_id = get_expr_value_id (constant);
                        constant = find_leader_in_sets (value_id, set1, set2,
                                                        AVAIL_OUT (pred));
                        if (constant)
                          return constant;
                      }
                  }
                else
                  return constant;
              }

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = pre_expr_pool.allocate ();
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
                /* When we end up re-using a value number make sure that
                   doesn't have unrelated (which we can't check here)
                   range or points-to info on it.  */
                if (result
                    && INTEGRAL_TYPE_P (TREE_TYPE (result))
                    && SSA_NAME_RANGE_INFO (result)
                    && ! SSA_NAME_IS_DEFAULT_DEF (result))
                  {
                    if (! VN_INFO (result)->info.range_info)
                      {
                        VN_INFO (result)->info.range_info
                          = SSA_NAME_RANGE_INFO (result);
                        VN_INFO (result)->range_info_anti_range_p
                          = SSA_NAME_ANTI_RANGE_P (result);
                      }
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "clearing range info of ");
                        print_generic_expr (dump_file, result);
                        fprintf (dump_file, "\n");
                      }
                    SSA_NAME_RANGE_INFO (result) = NULL;
                  }
                else if (result
                         && POINTER_TYPE_P (TREE_TYPE (result))
                         && SSA_NAME_PTR_INFO (result)
                         && ! SSA_NAME_IS_DEFAULT_DEF (result))
                  {
                    if (! VN_INFO (result)->info.ptr_info)
                      VN_INFO (result)->info.ptr_info
                        = SSA_NAME_PTR_INFO (result);
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "clearing points-to info of ");
                        print_generic_expr (dump_file, result);
                        fprintf (dump_file, "\n");
                      }
                    SSA_NAME_PTR_INFO (result) = NULL;
                  }
              }
            else
              {
                new_val_id = get_next_value_id ();
                value_expressions.safe_grow_cleared (get_max_value_id () + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vec<vn_reference_op_s> operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        vec<vn_reference_op_s> newoperands = vNULL;
        bool changed = false, same_valid = true;
        unsigned int i, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0; operands.iterate (i, &operand); i++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    changed |= name != op[n];
                    op[n] = name;
                  }
                else if (!opresult)
                  break;
              }
            if (n != 3)
              {
                newoperands.release ();
                return NULL;
              }
            if (!changed)
              continue;
            if (!newoperands.exists ())
              newoperands = operands.copy ();
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            newoperands[i] = newop;
          }
        gcc_checking_assert (i == operands.length ());

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands.exists ()
                                                    ? newoperands : operands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                newoperands.release ();
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands.exists ()
                                                      ? newoperands : operands,
                                                      &newref, VN_WALK);
            if (result)
              newoperands.release ();

            /* We can always insert constants, so if we have a partial
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                newoperands.release ();
                return NULL;
              }

            expr = pre_expr_pool.allocate ();
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    value_expressions.safe_grow_cleared
                      (get_max_value_id () + 1);
                  }
                else
                  new_val_id = ref->value_id;
                if (!newoperands.exists ())
                  newoperands = operands.copy ();
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = vNULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        newoperands.release ();
        return expr;
      }
      break;
    case NAME:
      {
        tree name = PRE_EXPR_NAME (expr);
        gimple *def_stmt = SSA_NAME_DEF_STMT (name);
        /* If the SSA name is defined by a PHI node in this block,
           translate it.  */
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          {
            edge e = find_edge (pred, gimple_bb (def_stmt));
            tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            return get_or_alloc_expr_for_name (def);
          }
        /* Otherwise return it unchanged - it will get removed if its
           value is not available in PRED's AVAIL_OUT set of expressions
           by the subtraction of TMP_GEN.  */
        return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
        return slot->v;
      /* Store NULL for the value we want to return in the case of
         recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
        slot->v = phitrans;
      else
        /* Remove failed translations again, they cause insert
           iteration to not pick up new opportunities reliably.  */
        phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
        return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
1889 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1890 contains its value-id. */
1892 static bool
1893 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1895 if (op && TREE_CODE (op) == SSA_NAME)
1897 unsigned int value_id = VN_INFO (op)->value_id;
1898 if (!(bitmap_set_contains_value (set1, value_id)
1899 || (set2 && bitmap_set_contains_value (set2, value_id))))
1900 return false;
1902 return true;
1905 /* Determine if the expression EXPR is valid in SET1 U SET2.
1906 ONLY SET2 CAN BE NULL.
1907 This means that we have a leader for each part of the expression
1908 (if it consists of values), or the expression is an SSA_NAME.
1909 For loads/calls, we also see if the vuse is killed in this block. */
1911 static bool
1912 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1914 switch (expr->kind)
1916 case NAME:
1917 /* By construction all NAMEs are available. Non-available
1918 NAMEs are removed by subtracting TMP_GEN from the sets. */
1919 return true;
1920 case NARY:
1922 unsigned int i;
1923 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1924 for (i = 0; i < nary->length; i++)
1925 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1926 return false;
1927 return true;
1929 break;
1930 case REFERENCE:
1932 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1933 vn_reference_op_t vro;
1934 unsigned int i;
1936 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1938 if (!op_valid_in_sets (set1, set2, vro->op0)
1939 || !op_valid_in_sets (set1, set2, vro->op1)
1940 || !op_valid_in_sets (set1, set2, vro->op2))
1941 return false;
1943 return true;
1945 default:
1946 gcc_unreachable ();
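/* For example (illustrative): if the union of the sets contains the
   values of a_1 and of a_1 + 1, the NARY a_1 + 1 is valid because its
   only SSA_NAME operand a_1 has its value-id in the union; a NARY
   b_2 * 2 whose operand b_2 has a value-id in neither set is not, and
   clean () below will drop it.  */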
1950 /* Remove from SET1 the expressions that are no longer valid in SET1 U SET2,
1951 i.e. expressions that are made up of values we have no leaders for
1952 in SET1 or SET2. */
1954 static void
1955 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
1957 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1958 pre_expr expr;
1959 int i;
1961 FOR_EACH_VEC_ELT (exprs, i, expr)
1963 if (!valid_in_sets (set1, set2, expr))
1964 bitmap_remove_expr_from_set (set1, expr);
1966 exprs.release ();
1969 /* Clean the set of expressions that are no longer valid in SET because
1970 they are clobbered in BLOCK or because they trap and may not be executed. */
1972 static void
1973 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1975 bitmap_iterator bi;
1976 unsigned i;
1977 pre_expr to_remove = NULL;
1979 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1981 /* Remove queued expr. */
1982 if (to_remove)
1984 bitmap_remove_expr_from_set (set, to_remove);
1985 to_remove = NULL;
1988 pre_expr expr = expression_for_id (i);
1989 if (expr->kind == REFERENCE)
1991 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1992 if (ref->vuse)
1994 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1995 if (!gimple_nop_p (def_stmt)
1996 && ((gimple_bb (def_stmt) != block
1997 && !dominated_by_p (CDI_DOMINATORS,
1998 block, gimple_bb (def_stmt)))
1999 || (gimple_bb (def_stmt) == block
2000 && value_dies_in_block_x (expr, block))))
2001 to_remove = expr;
2004 else if (expr->kind == NARY)
2006 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2007 /* If the NARY may trap make sure the block does not contain
2008 a possible exit point.
2009 ??? This is overly conservative if we translate AVAIL_OUT
2010 as the available expression might be after the exit point. */
2011 if (BB_MAY_NOTRETURN (block)
2012 && vn_nary_may_trap (nary))
2013 to_remove = expr;
2017 /* Remove queued expr. */
2018 if (to_remove)
2019 bitmap_remove_expr_from_set (set, to_remove);
2022 static sbitmap has_abnormal_preds;
2024 /* Compute the ANTIC set for BLOCK.
2026 If succs(BLOCK) > 1 then
2027 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2028 else if succs(BLOCK) == 1 then
2029 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2031 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2033 Note that clean() is deferred until after the iteration. */
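/* An illustrative example (not from the original sources): given the
   diamond

     bb2: if (c_1) goto bb3; else goto bb4;
     bb3: x_2 = a_1 + b_1; goto bb5;
     bb4: goto bb5;
     bb5: y_3 = a_1 + b_1;

   EXP_GEN (bb5) puts the value of a_1 + b_1 into ANTIC_IN (bb5); bb3
   and bb4 each have the single successor bb5, so their ANTIC_OUT is
   the phi-translation of ANTIC_IN (bb5); and at bb2 the intersection
   over both successors still contains the value, so a_1 + b_1 is
   anticipatable at the top of the diamond.  */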
2035 static bool
2036 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2038 bitmap_set_t S, old, ANTIC_OUT;
2039 bitmap_iterator bi;
2040 unsigned int bii;
2041 edge e;
2042 edge_iterator ei;
2044 bool changed = ! BB_VISITED (block);
2045 BB_VISITED (block) = 1;
2046 old = ANTIC_OUT = S = NULL;
2048 /* If any edges from predecessors are abnormal, antic_in is empty,
2049 so do nothing. */
2050 if (block_has_abnormal_pred_edge)
2051 goto maybe_dump_sets;
2053 old = ANTIC_IN (block);
2054 ANTIC_OUT = bitmap_set_new ();
2056 /* If the block has no successors, ANTIC_OUT is empty. */
2057 if (EDGE_COUNT (block->succs) == 0)
2059 /* If we have one successor, we could have some phi nodes to
2060 translate through. */
2061 else if (single_succ_p (block))
2063 basic_block succ_bb = single_succ (block);
2064 gcc_assert (BB_VISITED (succ_bb));
2065 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2067 /* If we have multiple successors, we take the intersection of all of
2068 them. Note that in the case of loop exit phi nodes, we may have
2069 phis to translate through. */
2070 else
2072 size_t i;
2073 basic_block bprime, first = NULL;
2075 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2076 FOR_EACH_EDGE (e, ei, block->succs)
2078 if (!first
2079 && BB_VISITED (e->dest))
2080 first = e->dest;
2081 else if (BB_VISITED (e->dest))
2082 worklist.quick_push (e->dest);
2083 else
2085 /* Unvisited successors get their ANTIC_IN replaced by the
2086 maximal set to arrive at a maximum ANTIC_IN solution.
2087 We can ignore them in the intersection operation and thus
2088 need not explicitly represent that maximum solution. */
2089 if (dump_file && (dump_flags & TDF_DETAILS))
2090 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2091 e->src->index, e->dest->index);
2095 /* With multiple successors we must have visited at least one already,
2096 which is guaranteed by the iteration order. */
2097 gcc_assert (first != NULL);
2099 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2101 /* If we have multiple successors we need to intersect the ANTIC_OUT
2102 sets. For values that's a simple intersection but for
2103 expressions it is a union. Given we want to have a single
2104 expression per value in our sets, we have to canonicalize.
2105 Avoid randomness and running into cycles like for PR82129 and
2106 canonicalize the expression we choose to the one with the
2107 lowest id. This requires we actually compute the union first. */
2108 FOR_EACH_VEC_ELT (worklist, i, bprime)
2110 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2112 bitmap_set_t tmp = bitmap_set_new ();
2113 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2114 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2115 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2116 bitmap_set_free (tmp);
2118 else
2120 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (bprime)->values);
2121 bitmap_ior_into (&ANTIC_OUT->expressions,
2122 &ANTIC_IN (bprime)->expressions);
2125 if (! worklist.is_empty ())
2127 /* Prune expressions not in the value set, canonicalizing to the
2128 expression with the lowest ID. */
2129 bitmap_iterator bi;
2130 unsigned int i;
2131 unsigned int to_clear = -1U;
2132 bitmap seen_value = BITMAP_ALLOC (NULL);
2133 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2135 if (to_clear != -1U)
2137 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2138 to_clear = -1U;
2140 pre_expr expr = expression_for_id (i);
2141 unsigned int value_id = get_expr_value_id (expr);
2142 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)
2143 || !bitmap_set_bit (seen_value, value_id))
2144 to_clear = i;
2146 if (to_clear != -1U)
2147 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2148 BITMAP_FREE (seen_value);
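/* An illustrative example of the canonicalization above: if one
   successor contributes expression e17 and another contributes e23 for
   the same value V5, the value intersection keeps V5 while the
   expression union holds both e17 and e23; the pruning loop then keeps
   only the lowest-numbered representative, e17, so the choice no
   longer depends on successor visit order (compare PR82129).  */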
2152 /* Prune expressions that are clobbered in block and thus become
2153 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2154 prune_clobbered_mems (ANTIC_OUT, block);
2156 /* Generate ANTIC_OUT - TMP_GEN. */
2157 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2159 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2160 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2161 TMP_GEN (block));
2163 /* Then union in the ANTIC_OUT - TMP_GEN values,
2164 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2165 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2166 bitmap_value_insert_into_set (ANTIC_IN (block),
2167 expression_for_id (bii));
2169 /* clean (ANTIC_IN (block)) is deferred until after the iteration has
2170 converged because it can cause non-convergence; see for example PR81181. */
2172 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2173 changed = true;
2175 maybe_dump_sets:
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2178 if (ANTIC_OUT)
2179 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2181 if (changed)
2182 fprintf (dump_file, "[changed] ");
2183 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2184 block->index);
2186 if (S)
2187 print_bitmap_set (dump_file, S, "S", block->index);
2189 if (old)
2190 bitmap_set_free (old);
2191 if (S)
2192 bitmap_set_free (S);
2193 if (ANTIC_OUT)
2194 bitmap_set_free (ANTIC_OUT);
2195 return changed;
2198 /* Compute PARTIAL_ANTIC for BLOCK.
2200 If succs(BLOCK) > 1 then
2201 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2202 in ANTIC_OUT for all succ(BLOCK)
2203 else if succs(BLOCK) == 1 then
2204 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2206 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
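/* Illustrative contrast with full ANTIC: if one successor anticipates
   only value V1 and another anticipates only V2, ANTIC_OUT (the
   intersection) is empty while PA_OUT (the union) is {V1, V2} -- the
   values are useful on some, but not every, outgoing path, which is
   exactly what "partially anticipatable" captures.  */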
2209 static void
2210 compute_partial_antic_aux (basic_block block,
2211 bool block_has_abnormal_pred_edge)
2213 bitmap_set_t old_PA_IN;
2214 bitmap_set_t PA_OUT;
2215 edge e;
2216 edge_iterator ei;
2217 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2219 old_PA_IN = PA_OUT = NULL;
2221 /* If any edges from predecessors are abnormal, antic_in is empty,
2222 so do nothing. */
2223 if (block_has_abnormal_pred_edge)
2224 goto maybe_dump_sets;
2226 /* If there are too many partially anticipatable values in the
2227 block, phi_translate_set can take an exponential time: stop
2228 before the translation starts. */
2229 if (max_pa
2230 && single_succ_p (block)
2231 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2232 goto maybe_dump_sets;
2234 old_PA_IN = PA_IN (block);
2235 PA_OUT = bitmap_set_new ();
2237 /* If the block has no successors, PA_OUT is empty. */
2238 if (EDGE_COUNT (block->succs) == 0)
2240 /* If we have one successor, we could have some phi nodes to
2241 translate through. Note that we can't phi translate across DFS
2242 back edges in partial antic, because it uses a union operation on
2243 the successors. For recurrences like IV's, we will end up
2244 generating a new value in the set on each go around (i + 3 (VH.1)
2245 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2246 else if (single_succ_p (block))
2248 basic_block succ = single_succ (block);
2249 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2250 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2252 /* If we have multiple successors, we take the union of all of
2253 them. */
2254 else
2256 size_t i;
2257 basic_block bprime;
2259 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2260 FOR_EACH_EDGE (e, ei, block->succs)
2262 if (e->flags & EDGE_DFS_BACK)
2263 continue;
2264 worklist.quick_push (e->dest);
2266 if (worklist.length () > 0)
2268 FOR_EACH_VEC_ELT (worklist, i, bprime)
2270 unsigned int i;
2271 bitmap_iterator bi;
2273 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2274 bitmap_value_insert_into_set (PA_OUT,
2275 expression_for_id (i));
2276 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2278 bitmap_set_t pa_in = bitmap_set_new ();
2279 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2280 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2281 bitmap_value_insert_into_set (PA_OUT,
2282 expression_for_id (i));
2283 bitmap_set_free (pa_in);
2285 else
2286 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2287 bitmap_value_insert_into_set (PA_OUT,
2288 expression_for_id (i));
2293 /* Prune expressions that are clobbered in block and thus become
2294 invalid if translated from PA_OUT to PA_IN. */
2295 prune_clobbered_mems (PA_OUT, block);
2297 /* PA_IN starts with PA_OUT - TMP_GEN.
2298 Then we subtract things from ANTIC_IN. */
2299 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2301 /* For partial antic, we want to put back in the phi results, since
2302 we will properly avoid making them partially antic over backedges. */
2303 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2304 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2306 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2307 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2309 clean (PA_IN (block), ANTIC_IN (block));
2311 maybe_dump_sets:
2312 if (dump_file && (dump_flags & TDF_DETAILS))
2314 if (PA_OUT)
2315 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2317 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2319 if (old_PA_IN)
2320 bitmap_set_free (old_PA_IN);
2321 if (PA_OUT)
2322 bitmap_set_free (PA_OUT);
2325 /* Compute ANTIC and partial ANTIC sets. */
2327 static void
2328 compute_antic (void)
2330 bool changed = true;
2331 int num_iterations = 0;
2332 basic_block block;
2333 int i;
2334 edge_iterator ei;
2335 edge e;
2337 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2338 We pre-build the map of blocks with incoming abnormal edges here. */
2339 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2340 bitmap_clear (has_abnormal_preds);
2342 FOR_ALL_BB_FN (block, cfun)
2344 BB_VISITED (block) = 0;
2346 FOR_EACH_EDGE (e, ei, block->preds)
2347 if (e->flags & EDGE_ABNORMAL)
2349 bitmap_set_bit (has_abnormal_preds, block->index);
2350 break;
2353 /* While we are here, give empty ANTIC_IN sets to each block. */
2354 ANTIC_IN (block) = bitmap_set_new ();
2355 if (do_partial_partial)
2356 PA_IN (block) = bitmap_set_new ();
2359 /* At the exit block we anticipate nothing. */
2360 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2362 /* For ANTIC computation we need a postorder that also guarantees that
2363 a block with a single successor is visited after its successor.
2364 RPO on the inverted CFG has this property. */
2365 auto_vec<int, 20> postorder;
2366 inverted_post_order_compute (&postorder);
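/* For example (illustrative), on the chain bb2 -> bb3 -> bb4 -> exit
   this order visits bb4, then bb3, then bb2, so each single-successor
   block finds BB_VISITED set and ANTIC_IN already computed for its
   successor on the very first sweep.  */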
2368 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2369 bitmap_clear (worklist);
2370 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2371 bitmap_set_bit (worklist, e->src->index);
2372 while (changed)
2374 if (dump_file && (dump_flags & TDF_DETAILS))
2375 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2376 /* ??? We need to clear our PHI translation cache here as the
2377 ANTIC sets shrink and we restrict valid translations to
2378 those having operands with leaders in ANTIC. Same below
2379 for PA ANTIC computation. */
2380 num_iterations++;
2381 changed = false;
2382 for (i = postorder.length () - 1; i >= 0; i--)
2384 if (bitmap_bit_p (worklist, postorder[i]))
2386 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2387 bitmap_clear_bit (worklist, block->index);
2388 if (compute_antic_aux (block,
2389 bitmap_bit_p (has_abnormal_preds,
2390 block->index)))
2392 FOR_EACH_EDGE (e, ei, block->preds)
2393 bitmap_set_bit (worklist, e->src->index);
2394 changed = true;
2398 /* Theoretically possible, but *highly* unlikely. */
2399 gcc_checking_assert (num_iterations < 500);
2402 /* We have to clean after the dataflow problem has converged, as cleaning
2403 can cause non-convergence because it is based on expressions
2404 rather than values. */
2405 FOR_EACH_BB_FN (block, cfun)
2406 clean (ANTIC_IN (block));
2408 statistics_histogram_event (cfun, "compute_antic iterations",
2409 num_iterations);
2411 if (do_partial_partial)
2413 /* For partial antic we ignore backedges and thus we do not need
2414 to perform any iteration when we process blocks in postorder. */
2415 int postorder_num
2416 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2417 for (i = postorder_num - 1 ; i >= 0; i--)
2419 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2420 compute_partial_antic_aux (block,
2421 bitmap_bit_p (has_abnormal_preds,
2422 block->index));
2426 sbitmap_free (has_abnormal_preds);
2430 /* Inserted expressions are placed onto this worklist, which is used
2431 for performing quick dead code elimination of insertions we made
2432 that didn't turn out to be necessary. */
2433 static bitmap inserted_exprs;
2435 /* The actual worker for create_component_ref_by_pieces. */
2437 static tree
2438 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2439 unsigned int *operand, gimple_seq *stmts)
2441 vn_reference_op_t currop = &ref->operands[*operand];
2442 tree genop;
2443 ++*operand;
2444 switch (currop->opcode)
2446 case CALL_EXPR:
2447 gcc_unreachable ();
2449 case MEM_REF:
2451 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2452 stmts);
2453 if (!baseop)
2454 return NULL_TREE;
2455 tree offset = currop->op0;
2456 if (TREE_CODE (baseop) == ADDR_EXPR
2457 && handled_component_p (TREE_OPERAND (baseop, 0)))
2459 HOST_WIDE_INT off;
2460 tree base;
2461 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2462 &off);
2463 gcc_assert (base);
2464 offset = int_const_binop (PLUS_EXPR, offset,
2465 build_int_cst (TREE_TYPE (offset),
2466 off));
2467 baseop = build_fold_addr_expr (base);
2469 genop = build2 (MEM_REF, currop->type, baseop, offset);
2470 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2471 MR_DEPENDENCE_BASE (genop) = currop->base;
2472 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2473 return genop;
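/* Illustrative example of the folding above, with hypothetical names:
   for baseop == &a.b where a.b sits at a constant byte offset,
   get_addr_base_and_unit_offset returns base == a and, say, off == 8,
   and the access is rebuilt as MEM[(type *)&a + (offset + 8)] instead
   of leaving a handled component under the ADDR_EXPR.  */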
2476 case TARGET_MEM_REF:
2478 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2479 vn_reference_op_t nextop = &ref->operands[++*operand];
2480 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2481 stmts);
2482 if (!baseop)
2483 return NULL_TREE;
2484 if (currop->op0)
2486 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2487 if (!genop0)
2488 return NULL_TREE;
2490 if (nextop->op0)
2492 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2493 if (!genop1)
2494 return NULL_TREE;
2496 genop = build5 (TARGET_MEM_REF, currop->type,
2497 baseop, currop->op2, genop0, currop->op1, genop1);
2499 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2500 MR_DEPENDENCE_BASE (genop) = currop->base;
2501 return genop;
2504 case ADDR_EXPR:
2505 if (currop->op0)
2507 gcc_assert (is_gimple_min_invariant (currop->op0));
2508 return currop->op0;
2510 /* Fallthrough. */
2511 case REALPART_EXPR:
2512 case IMAGPART_EXPR:
2513 case VIEW_CONVERT_EXPR:
2515 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2516 stmts);
2517 if (!genop0)
2518 return NULL_TREE;
2519 return fold_build1 (currop->opcode, currop->type, genop0);
2522 case WITH_SIZE_EXPR:
2524 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2525 stmts);
2526 if (!genop0)
2527 return NULL_TREE;
2528 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2529 if (!genop1)
2530 return NULL_TREE;
2531 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2534 case BIT_FIELD_REF:
2536 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2537 stmts);
2538 if (!genop0)
2539 return NULL_TREE;
2540 tree op1 = currop->op0;
2541 tree op2 = currop->op1;
2542 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2543 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2544 return fold (t);
2547 /* For array ref vn_reference_op's, operand 1 of the array ref
2548 is op0 of the reference op and operand 3 of the array ref is
2549 op1. */
2550 case ARRAY_RANGE_REF:
2551 case ARRAY_REF:
2553 tree genop0;
2554 tree genop1 = currop->op0;
2555 tree genop2 = currop->op1;
2556 tree genop3 = currop->op2;
2557 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2558 stmts);
2559 if (!genop0)
2560 return NULL_TREE;
2561 genop1 = find_or_generate_expression (block, genop1, stmts);
2562 if (!genop1)
2563 return NULL_TREE;
2564 if (genop2)
2566 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2567 /* Drop zero minimum index if redundant. */
2568 if (integer_zerop (genop2)
2569 && (!domain_type
2570 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2571 genop2 = NULL_TREE;
2572 else
2574 genop2 = find_or_generate_expression (block, genop2, stmts);
2575 if (!genop2)
2576 return NULL_TREE;
2579 if (genop3)
2581 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2582 /* We can't always put a size in units of the element alignment
2583 here as the element alignment may not be visible. See
2584 PR43783. Simply drop the element size for constant
2585 sizes. */
2586 if (TREE_CODE (genop3) == INTEGER_CST
2587 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2588 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2589 (wi::to_offset (genop3)
2590 * vn_ref_op_align_unit (currop))))
2591 genop3 = NULL_TREE;
2592 else
2594 genop3 = find_or_generate_expression (block, genop3, stmts);
2595 if (!genop3)
2596 return NULL_TREE;
2599 return build4 (currop->opcode, currop->type, genop0, genop1,
2600 genop2, genop3);
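/* For example (illustrative): for an ARRAY_REF of int a[n] where
   TYPE_SIZE_UNIT (int) is 4 and vn_ref_op_align_unit (currop) is 4, a
   recorded genop3 of 1 carries no information beyond the type and is
   dropped; a variable element size, in contrast, must be materialized
   through find_or_generate_expression like any other operand.  */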
2602 case COMPONENT_REF:
2604 tree op0;
2605 tree op1;
2606 tree genop2 = currop->op1;
2607 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2608 if (!op0)
2609 return NULL_TREE;
2610 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */
2611 op1 = currop->op0;
2612 if (genop2)
2614 genop2 = find_or_generate_expression (block, genop2, stmts);
2615 if (!genop2)
2616 return NULL_TREE;
2618 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2621 case SSA_NAME:
2623 genop = find_or_generate_expression (block, currop->op0, stmts);
2624 return genop;
2626 case STRING_CST:
2627 case INTEGER_CST:
2628 case COMPLEX_CST:
2629 case VECTOR_CST:
2630 case REAL_CST:
2631 case CONSTRUCTOR:
2632 case VAR_DECL:
2633 case PARM_DECL:
2634 case CONST_DECL:
2635 case RESULT_DECL:
2636 case FUNCTION_DECL:
2637 return currop->op0;
2639 default:
2640 gcc_unreachable ();
2644 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2645 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2646 trying to rename aggregates into SSA form directly, which is a no-no.
2648 Thus, this routine doesn't create temporaries, it just builds a
2649 single access expression for the array, calling
2650 find_or_generate_expression to build the innermost pieces.
2652 This function is a subroutine of create_expression_by_pieces, and
2653 should not be called on its own unless you really know what you
2654 are doing. */
2656 static tree
2657 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2658 gimple_seq *stmts)
2660 unsigned int op = 0;
2661 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2664 /* Find a simple leader for an expression, or generate one using
2665 create_expression_by_pieces from a NARY expression for the value.
2666 BLOCK is the basic_block we are looking for leaders in.
2667 OP is the tree expression to find a leader for or generate.
2668 Returns the leader or NULL_TREE on failure. */
2670 static tree
2671 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2673 pre_expr expr = get_or_alloc_expr_for (op);
2674 unsigned int lookfor = get_expr_value_id (expr);
2675 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2676 if (leader)
2678 if (leader->kind == NAME)
2679 return PRE_EXPR_NAME (leader);
2680 else if (leader->kind == CONSTANT)
2681 return PRE_EXPR_CONSTANT (leader);
2683 /* Defer. */
2684 return NULL_TREE;
2687 /* It must be a complex expression, so generate it recursively. Note
2688 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2689 where the insert algorithm fails to insert a required expression. */
2690 bitmap exprset = value_expressions[lookfor];
2691 bitmap_iterator bi;
2692 unsigned int i;
2693 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2695 pre_expr temp = expression_for_id (i);
2696 /* We cannot insert random REFERENCE expressions at arbitrary
2697 places. We can insert NARYs, which eventually re-materialize
2698 their operand values. */
2699 if (temp->kind == NARY)
2700 return create_expression_by_pieces (block, temp, stmts,
2701 get_expr_type (expr));
2704 /* Defer. */
2705 return NULL_TREE;
2708 /* Create an expression in pieces, so that we can handle very complex
2709 expressions that may be ANTIC, but not necessarily GIMPLE.
2710 BLOCK is the basic block the expression will be inserted into,
2711 EXPR is the expression to insert (in value form)
2712 STMTS is a statement list to append the necessary insertions into.
2714 This function will die if we hit some value that shouldn't be
2715 ANTIC but is (i.e. there is no leader for it, or for its components).
2716 The function returns NULL_TREE in case a different antic expression
2717 has to be inserted first.
2718 This function may also generate expressions that are themselves
2719 partially or fully redundant. Those that are will be either made
2720 fully redundant during the next iteration of insert (for partially
2721 redundant ones), or eliminated by eliminate (for fully redundant
2722 ones). */
2724 static tree
2725 create_expression_by_pieces (basic_block block, pre_expr expr,
2726 gimple_seq *stmts, tree type)
2728 tree name;
2729 tree folded;
2730 gimple_seq forced_stmts = NULL;
2731 unsigned int value_id;
2732 gimple_stmt_iterator gsi;
2733 tree exprtype = type ? type : get_expr_type (expr);
2734 pre_expr nameexpr;
2735 gassign *newstmt;
2737 switch (expr->kind)
2739 /* We may hit the NAME/CONSTANT case if we have to convert types
2740 that value numbering saw through. */
2741 case NAME:
2742 folded = PRE_EXPR_NAME (expr);
2743 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2744 return folded;
2745 break;
2746 case CONSTANT:
2748 folded = PRE_EXPR_CONSTANT (expr);
2749 tree tem = fold_convert (exprtype, folded);
2750 if (is_gimple_min_invariant (tem))
2751 return tem;
2752 break;
2754 case REFERENCE:
2755 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2757 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2758 unsigned int operand = 1;
2759 vn_reference_op_t currop = &ref->operands[0];
2760 tree sc = NULL_TREE;
2761 tree fn;
2762 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2763 fn = currop->op0;
2764 else
2765 fn = find_or_generate_expression (block, currop->op0, stmts);
2766 if (!fn)
2767 return NULL_TREE;
2768 if (currop->op1)
2770 sc = find_or_generate_expression (block, currop->op1, stmts);
2771 if (!sc)
2772 return NULL_TREE;
2774 auto_vec<tree> args (ref->operands.length () - 1);
2775 while (operand < ref->operands.length ())
2777 tree arg = create_component_ref_by_pieces_1 (block, ref,
2778 &operand, stmts);
2779 if (!arg)
2780 return NULL_TREE;
2781 args.quick_push (arg);
2783 gcall *call
2784 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2785 ? build_fold_addr_expr (fn) : fn), args);
2786 gimple_call_set_with_bounds (call, currop->with_bounds);
2787 if (sc)
2788 gimple_call_set_chain (call, sc);
2789 tree forcedname = make_ssa_name (currop->type);
2790 gimple_call_set_lhs (call, forcedname);
2791 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2792 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2793 folded = forcedname;
2795 else
2797 folded = create_component_ref_by_pieces (block,
2798 PRE_EXPR_REFERENCE (expr),
2799 stmts);
2800 if (!folded)
2801 return NULL_TREE;
2802 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2803 newstmt = gimple_build_assign (name, folded);
2804 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2805 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2806 folded = name;
2808 break;
2809 case NARY:
2811 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2812 tree *genop = XALLOCAVEC (tree, nary->length);
2813 unsigned i;
2814 for (i = 0; i < nary->length; ++i)
2816 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2817 if (!genop[i])
2818 return NULL_TREE;
2819 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2820 may have conversions stripped. */
2821 if (nary->opcode == POINTER_PLUS_EXPR)
2823 if (i == 0)
2824 genop[i] = gimple_convert (&forced_stmts,
2825 nary->type, genop[i]);
2826 else if (i == 1)
2827 genop[i] = gimple_convert (&forced_stmts,
2828 sizetype, genop[i]);
2830 else
2831 genop[i] = gimple_convert (&forced_stmts,
2832 TREE_TYPE (nary->op[i]), genop[i]);
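/* Illustrative example: when re-creating p_1 p+ i_2 after value
   numbering looked through conversions, genop[0] is converted back to
   the pointer type of the NARY and genop[1] to sizetype, because
   GIMPLE requires the offset operand of a POINTER_PLUS_EXPR to be of
   sizetype.  */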
2834 if (nary->opcode == CONSTRUCTOR)
2836 vec<constructor_elt, va_gc> *elts = NULL;
2837 for (i = 0; i < nary->length; ++i)
2838 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2839 folded = build_constructor (nary->type, elts);
2840 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2841 newstmt = gimple_build_assign (name, folded);
2842 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2843 folded = name;
2845 else
2847 switch (nary->length)
2849 case 1:
2850 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2851 genop[0]);
2852 break;
2853 case 2:
2854 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2855 genop[0], genop[1]);
2856 break;
2857 case 3:
2858 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2859 genop[0], genop[1], genop[2]);
2860 break;
2861 default:
2862 gcc_unreachable ();
2866 break;
2867 default:
2868 gcc_unreachable ();
2871 folded = gimple_convert (&forced_stmts, exprtype, folded);
2873 /* If there is nothing to insert, return the simplified result. */
2874 if (gimple_seq_empty_p (forced_stmts))
2875 return folded;
2876 /* If we simplified to a constant, return it and discard any stmts
2877 we built. */
2878 if (is_gimple_min_invariant (folded))
2880 gimple_seq_discard (forced_stmts);
2881 return folded;
2883 /* Likewise if we simplified to something not queued for insertion. */
2884 bool found = false;
2885 gsi = gsi_last (forced_stmts);
2886 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2888 gimple *stmt = gsi_stmt (gsi);
2889 tree forcedname = gimple_get_lhs (stmt);
2890 if (forcedname == folded)
2892 found = true;
2893 break;
2896 if (! found)
2898 gimple_seq_discard (forced_stmts);
2899 return folded;
2901 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2903 /* If we have any intermediate expressions, add them to the value sets
2904 and chain them into the instruction stream. */
2905 if (forced_stmts)
2907 gsi = gsi_start (forced_stmts);
2908 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2910 gimple *stmt = gsi_stmt (gsi);
2911 tree forcedname = gimple_get_lhs (stmt);
2912 pre_expr nameexpr;
2914 if (forcedname != folded)
2916 VN_INFO_GET (forcedname)->valnum = forcedname;
2917 VN_INFO (forcedname)->value_id = get_next_value_id ();
2918 nameexpr = get_or_alloc_expr_for_name (forcedname);
2919 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2920 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2921 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2924 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2926 gimple_seq_add_seq (stmts, forced_stmts);
2929 name = folded;
2931 /* Fold the last statement. */
2932 gsi = gsi_last (*stmts);
2933 if (fold_stmt_inplace (&gsi))
2934 update_stmt (gsi_stmt (gsi));
2936 /* Add a value number to the temporary.
2937 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2938 we are creating the expression by pieces, and this particular piece of
2939 the expression may already be represented. There is no harm in replacing
2940 here. */
2941 value_id = get_expr_value_id (expr);
2942 VN_INFO_GET (name)->value_id = value_id;
2943 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2944 if (VN_INFO (name)->valnum == NULL_TREE)
2945 VN_INFO (name)->valnum = name;
2946 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2947 nameexpr = get_or_alloc_expr_for_name (name);
2948 add_to_value (value_id, nameexpr);
2949 if (NEW_SETS (block))
2950 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2951 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2953 pre_stats.insertions++;
2954 if (dump_file && (dump_flags & TDF_DETAILS))
2956 fprintf (dump_file, "Inserted ");
2957 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2958 fprintf (dump_file, " in predecessor %d (%04d)\n",
2959 block->index, value_id);
2962 return name;
2966 /* Insert the to-be-made-available values of expression EXPRNUM for each
2967 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2968 merge the result with a phi node, given the same value number as
2969 the expression. Return true if we have inserted new stuff. */
2971 static bool
2972 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2973 vec<pre_expr> avail)
2975 pre_expr expr = expression_for_id (exprnum);
2976 pre_expr newphi;
2977 unsigned int val = get_expr_value_id (expr);
2978 edge pred;
2979 bool insertions = false;
2980 bool nophi = false;
2981 basic_block bprime;
2982 pre_expr eprime;
2983 edge_iterator ei;
2984 tree type = get_expr_type (expr);
2985 tree temp;
2986 gphi *phi;
2988 /* Make sure we aren't creating an induction variable. */
2989 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2991 bool firstinsideloop = false;
2992 bool secondinsideloop = false;
2993 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2994 EDGE_PRED (block, 0)->src);
2995 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2996 EDGE_PRED (block, 1)->src);
2997 /* Induction variables only have one edge inside the loop. */
2998 if ((firstinsideloop ^ secondinsideloop)
2999 && expr->kind != REFERENCE)
3001 if (dump_file && (dump_flags & TDF_DETAILS))
3002 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3003 nophi = true;
3007 /* Make the necessary insertions. */
3008 FOR_EACH_EDGE (pred, ei, block->preds)
3010 gimple_seq stmts = NULL;
3011 tree builtexpr;
3012 bprime = pred->src;
3013 eprime = avail[pred->dest_idx];
3014 builtexpr = create_expression_by_pieces (bprime, eprime,
3015 &stmts, type);
3016 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3017 if (!gimple_seq_empty_p (stmts))
3019 gsi_insert_seq_on_edge (pred, stmts);
3020 insertions = true;
3022 if (!builtexpr)
3024 /* We cannot insert a PHI node if we failed to insert
3025 on one edge. */
3026 nophi = true;
3027 continue;
3029 if (is_gimple_min_invariant (builtexpr))
3030 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3031 else
3032 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3034 /* If we didn't want a phi node, and we made insertions, we still have
3035 inserted new stuff, and thus return true. If we didn't want a phi node,
3036 and didn't make insertions, we haven't added anything new, so return
3037 false. */
3038 if (nophi && insertions)
3039 return true;
3040 else if (nophi && !insertions)
3041 return false;
3043 /* Now build a phi for the new variable. */
3044 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3045 phi = create_phi_node (temp, block);
3047 VN_INFO_GET (temp)->value_id = val;
3048 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3049 if (VN_INFO (temp)->valnum == NULL_TREE)
3050 VN_INFO (temp)->valnum = temp;
3051 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3052 FOR_EACH_EDGE (pred, ei, block->preds)
3054 pre_expr ae = avail[pred->dest_idx];
3055 gcc_assert (get_expr_type (ae) == type
3056 || useless_type_conversion_p (type, get_expr_type (ae)));
3057 if (ae->kind == CONSTANT)
3058 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3059 pred, UNKNOWN_LOCATION);
3060 else
3061 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3064 newphi = get_or_alloc_expr_for_name (temp);
3065 add_to_value (val, newphi);
3067 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3068 this insertion, since we test for the existence of this value in PHI_GEN
3069 before proceeding with the partial redundancy checks in insert_aux.
3071 The value may exist in AVAIL_OUT, in particular, it could be represented
3072 by the expression we are trying to eliminate, in which case we want the
3073 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3074 inserted there.
3076 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3077 this block, because if it did, it would have existed in our dominator's
3078 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3081 bitmap_insert_into_set (PHI_GEN (block), newphi);
3082 bitmap_value_replace_in_set (AVAIL_OUT (block),
3083 newphi);
3084 bitmap_insert_into_set (NEW_SETS (block),
3085 newphi);
3087 /* If we insert a PHI node for a conversion of another PHI node
3088 in the same basic-block try to preserve range information.
3089 This is important so that followup loop passes receive optimal
3090 number-of-iterations analysis results. See PR61743. */
3091 if (expr->kind == NARY
3092 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3093 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3094 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3095 && INTEGRAL_TYPE_P (type)
3096 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3097 && (TYPE_PRECISION (type)
3098 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3099 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3101 wide_int min, max;
3102 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3103 && !wi::neg_p (min, SIGNED)
3104 && !wi::neg_p (max, SIGNED))
3105 /* Just handle extension and sign-changes of all-positive ranges. */
3106 set_range_info (temp,
3107 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3108 wide_int_storage::from (min, TYPE_PRECISION (type),
3109 TYPE_SIGN (type)),
3110 wide_int_storage::from (max, TYPE_PRECISION (type),
3111 TYPE_SIGN (type)));
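/* Illustrative example for the above: if i_1 is an int PHI in BLOCK
   with the recorded range [0, 100] and we insert the conversion PHI
   long prephitmp_2 = (long) i_1 in the same block, copying the range
   to [0, 100] at the wider precision lets the following loop passes
   derive the same iteration bounds they would have seen from i_1
   (the name prephitmp_2 is hypothetical).  */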
3114 if (dump_file && (dump_flags & TDF_DETAILS))
3116 fprintf (dump_file, "Created phi ");
3117 print_gimple_stmt (dump_file, phi, 0);
3118 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3120 pre_stats.phis++;
3121 return true;
3126 /* Perform insertion of partially redundant or hoistable values.
3127 For BLOCK, do the following:
3128 1. Propagate the NEW_SETS of the dominator into the current block.
3129 If the block has multiple predecessors,
3130 2a. Iterate over the ANTIC expressions for the block to see if
3131 any of them are partially redundant.
3132 2b. If so, insert them into the necessary predecessors to make
3133 the expression fully redundant.
3134 2c. Insert a new PHI merging the values of the predecessors.
3135 2d. Insert the new PHI, and the new expressions, into the
3136 NEW_SETS set.
3137 If the block has multiple successors,
3138 3a. Iterate over the ANTIC values for the block to see if
3139 any of them are good candidates for hoisting.
3140 3b. If so, insert expressions computing the values in BLOCK,
3141 and add the new expressions into the NEW_SETS set.
3142 4. Recursively call ourselves on the dominator children of BLOCK.
3144 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3145 do_pre_regular_insertion and do_pre_partial_partial_insertion. 3a and 3b are
3146 done in do_hoist_insertion.
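/* A classic instance of step 2 (illustrative):

     bb3: if (c_1) goto bb4; else goto bb5;
     bb4: t_2 = a_1 + b_1;
     bb5: u_3 = a_1 + b_1;	// partially redundant

   Regular insertion computes pretmp = a_1 + b_1 on the bb3 -> bb5 edge
   where it is missing, creates prephitmp = PHI <t_2, pretmp> in bb5,
   and elimination then replaces u_3 by prephitmp.  */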
3149 static bool
3150 do_pre_regular_insertion (basic_block block, basic_block dom)
3152 bool new_stuff = false;
3153 vec<pre_expr> exprs;
3154 pre_expr expr;
3155 auto_vec<pre_expr> avail;
3156 int i;
3158 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3159 avail.safe_grow (EDGE_COUNT (block->preds));
3161 FOR_EACH_VEC_ELT (exprs, i, expr)
3163 if (expr->kind == NARY
3164 || expr->kind == REFERENCE)
3166 unsigned int val;
3167 bool by_some = false;
3168 bool cant_insert = false;
3169 bool all_same = true;
3170 pre_expr first_s = NULL;
3171 edge pred;
3172 basic_block bprime;
3173 pre_expr eprime = NULL;
3174 edge_iterator ei;
3175 pre_expr edoubleprime = NULL;
3176 bool do_insertion = false;
3178 val = get_expr_value_id (expr);
3179 if (bitmap_set_contains_value (PHI_GEN (block), val))
3180 continue;
3181 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3183 if (dump_file && (dump_flags & TDF_DETAILS))
3185 fprintf (dump_file, "Found fully redundant value: ");
3186 print_pre_expr (dump_file, expr);
3187 fprintf (dump_file, "\n");
3189 continue;
3192 FOR_EACH_EDGE (pred, ei, block->preds)
3194 unsigned int vprime;
3196 /* We should never run insertion for the exit block
3197 and so not come across fake pred edges. */
3198 gcc_assert (!(pred->flags & EDGE_FAKE));
3199 bprime = pred->src;
3200 /* We are looking at ANTIC_OUT of bprime. */
3201 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3202 bprime, block);
3204 /* eprime will generally only be NULL if the
3205 value of the expression, translated
3206 through the PHI for this predecessor, is
3207 undefined. If that is the case, we can't
3208 make the expression fully redundant,
3209 because its value is undefined along a
3210 predecessor path. We can thus break out
3211 early because it doesn't matter what the
3212 rest of the results are. */
3213 if (eprime == NULL)
3215 avail[pred->dest_idx] = NULL;
3216 cant_insert = true;
3217 break;
3220 vprime = get_expr_value_id (eprime);
3221 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3222 vprime);
3223 if (edoubleprime == NULL)
3225 avail[pred->dest_idx] = eprime;
3226 all_same = false;
3228 else
3230 avail[pred->dest_idx] = edoubleprime;
3231 by_some = true;
3232 /* We want to perform insertions to remove a redundancy on
3233 a path in the CFG we want to optimize for speed. */
3234 if (optimize_edge_for_speed_p (pred))
3235 do_insertion = true;
3236 if (first_s == NULL)
3237 first_s = edoubleprime;
3238 else if (!pre_expr_d::equal (first_s, edoubleprime))
3239 all_same = false;
3242 /* If we can insert it, it's not the same value
3243 already existing along every predecessor, and
3244 it's defined by some predecessor, it is
3245 partially redundant. */
3246 if (!cant_insert && !all_same && by_some)
3248 if (!do_insertion)
3250 if (dump_file && (dump_flags & TDF_DETAILS))
3252 fprintf (dump_file, "Skipping partial redundancy for "
3253 "expression ");
3254 print_pre_expr (dump_file, expr);
3255 fprintf (dump_file, " (%04d), no redundancy on to be "
3256 "optimized for speed edge\n", val);
3259 else if (dbg_cnt (treepre_insert))
3261 if (dump_file && (dump_flags & TDF_DETAILS))
3263 fprintf (dump_file, "Found partial redundancy for "
3264 "expression ");
3265 print_pre_expr (dump_file, expr);
3266 fprintf (dump_file, " (%04d)\n",
3267 get_expr_value_id (expr));
3269 if (insert_into_preds_of_block (block,
3270 get_expression_id (expr),
3271 avail))
3272 new_stuff = true;
3275 /* If all edges produce the same value and that value is
3276 an invariant, then the PHI has the same value on all
3277 edges. Note this. */
3278 else if (!cant_insert && all_same)
3280 gcc_assert (edoubleprime->kind == CONSTANT
3281 || edoubleprime->kind == NAME);
3283 tree temp = make_temp_ssa_name (get_expr_type (expr),
3284 NULL, "pretmp");
3285 gassign *assign
3286 = gimple_build_assign (temp,
3287 edoubleprime->kind == CONSTANT ?
3288 PRE_EXPR_CONSTANT (edoubleprime) :
3289 PRE_EXPR_NAME (edoubleprime));
3290 gimple_stmt_iterator gsi = gsi_after_labels (block);
3291 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3293 VN_INFO_GET (temp)->value_id = val;
3294 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3295 if (VN_INFO (temp)->valnum == NULL_TREE)
3296 VN_INFO (temp)->valnum = temp;
3297 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3298 pre_expr newe = get_or_alloc_expr_for_name (temp);
3299 add_to_value (val, newe);
3300 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3301 bitmap_insert_into_set (NEW_SETS (block), newe);
3306 exprs.release ();
3307 return new_stuff;
3311 /* Perform insertion for partially anticipatable expressions. There
3312 is only one case in which we perform insertion for these: when the
3313 expression is both partially anticipatable and fully available.
3314 In this case, we know that putting it earlier will enable us to
3315 remove the later computation. */
3317 static bool
3318 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3320 bool new_stuff = false;
3321 vec<pre_expr> exprs;
3322 pre_expr expr;
3323 auto_vec<pre_expr> avail;
3324 int i;
3326 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3327 avail.safe_grow (EDGE_COUNT (block->preds));
3329 FOR_EACH_VEC_ELT (exprs, i, expr)
3331 if (expr->kind == NARY
3332 || expr->kind == REFERENCE)
3334 unsigned int val;
3335 bool by_all = true;
3336 bool cant_insert = false;
3337 edge pred;
3338 basic_block bprime;
3339 pre_expr eprime = NULL;
3340 edge_iterator ei;
3342 val = get_expr_value_id (expr);
3343 if (bitmap_set_contains_value (PHI_GEN (block), val))
3344 continue;
3345 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3346 continue;
3348 FOR_EACH_EDGE (pred, ei, block->preds)
3350 unsigned int vprime;
3351 pre_expr edoubleprime;
3353 /* We should never run insertion for the exit block
3354 and so not come across fake pred edges. */
3355 gcc_assert (!(pred->flags & EDGE_FAKE));
3356 bprime = pred->src;
3357 eprime = phi_translate (expr, ANTIC_IN (block),
3358 PA_IN (block),
3359 bprime, block);
3361 /* eprime will generally only be NULL if the
3362 value of the expression, translated
3363 through the PHI for this predecessor, is
3364 undefined. If that is the case, we can't
3365 make the expression fully redundant,
3366 because its value is undefined along a
3367 predecessor path. We can thus break out
3368 early because it doesn't matter what the
3369 rest of the results are. */
3370 if (eprime == NULL)
3372 avail[pred->dest_idx] = NULL;
3373 cant_insert = true;
3374 break;
3377 vprime = get_expr_value_id (eprime);
3378 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3379 avail[pred->dest_idx] = edoubleprime;
3380 if (edoubleprime == NULL)
3382 by_all = false;
3383 break;
3387 /* If we can insert it and the value is fully
3388 available along every predecessor, inserting
3389 the computation here lets the later, partially
3390 anticipated computation be removed. */
3391 if (!cant_insert && by_all)
3393 edge succ;
3394 bool do_insertion = false;
3396 /* Insert only if we can remove a later expression on a path
3397 that we want to optimize for speed.
3398 The phi node that we will be inserting in BLOCK is not free,
3399 and inserting it for the sake of a !optimize_for_speed successor
3400 may cause regressions on the speed path. */
3401 FOR_EACH_EDGE (succ, ei, block->succs)
3403 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3404 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3406 if (optimize_edge_for_speed_p (succ))
3407 do_insertion = true;
3411 if (!do_insertion)
3413 if (dump_file && (dump_flags & TDF_DETAILS))
3415 fprintf (dump_file, "Skipping partial partial redundancy "
3416 "for expression ");
3417 print_pre_expr (dump_file, expr);
3418 fprintf (dump_file, " (%04d), not (partially) anticipated "
3419 "on any to be optimized for speed edges\n", val);
3422 else if (dbg_cnt (treepre_insert))
3424 pre_stats.pa_insert++;
3425 if (dump_file && (dump_flags & TDF_DETAILS))
3427 fprintf (dump_file, "Found partial partial redundancy "
3428 "for expression ");
3429 print_pre_expr (dump_file, expr);
3430 fprintf (dump_file, " (%04d)\n",
3431 get_expr_value_id (expr));
3433 if (insert_into_preds_of_block (block,
3434 get_expression_id (expr),
3435 avail))
3436 new_stuff = true;
3442 exprs.release ();
3443 return new_stuff;
3446 /* Insert expressions in BLOCK to compute hoistable values up.
3447 Return TRUE if something was inserted, otherwise return FALSE.
3448 The caller has to make sure that BLOCK has at least two successors. */
3450 static bool
3451 do_hoist_insertion (basic_block block)
3453 edge e;
3454 edge_iterator ei;
3455 bool new_stuff = false;
3456 unsigned i;
3457 gimple_stmt_iterator last;
3459 /* At least two successors, or else... */
3460 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3462 /* Check that all successors of BLOCK are dominated by block.
3463 We could use dominated_by_p() for this, but actually there is a much
3464 quicker check: any successor that is dominated by BLOCK can't have
3465 more than one predecessor edge. */
3466 FOR_EACH_EDGE (e, ei, block->succs)
3467 if (! single_pred_p (e->dest))
3468 return false;
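/* Illustrative reasoning: if e->dest has BLOCK as its only
   predecessor, every path to e->dest runs through BLOCK, so BLOCK
   trivially dominates it; requiring single_pred_p of every successor
   is thus a cheap, conservative stand-in for calling dominated_by_p
   on each.  */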
3470 /* Determine the insertion point. If we would have to insert before
3471 the last stmt and cannot safely do so, bail out. */
3472 last = gsi_last_bb (block);
3473 if (!gsi_end_p (last)
3474 && !is_ctrl_stmt (gsi_stmt (last))
3475 && stmt_ends_bb_p (gsi_stmt (last)))
3476 return false;
3478 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3479 hoistable values. */
3480 bitmap_set hoistable_set;
3482 /* A hoistable value must be in ANTIC_IN(block)
3483 but not in AVAIL_OUT(BLOCK). */
3484 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3485 bitmap_and_compl (&hoistable_set.values,
3486 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3488 /* Short-cut for a common case: hoistable_set is empty. */
3489 if (bitmap_empty_p (&hoistable_set.values))
3490 return false;
3492 /* Compute which of the hoistable values is in AVAIL_OUT of
3493 at least one of the successors of BLOCK. */
3494 bitmap_head availout_in_some;
3495 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3496 FOR_EACH_EDGE (e, ei, block->succs)
3497 /* Do not consider expressions solely because of their availability
3498 on loop exits. They'd be ANTIC-IN throughout the whole loop
3499 and thus effectively hoisted across loops by combination of
3500 PRE and hoisting. */
3501 if (! loop_exit_edge_p (block->loop_father, e))
3502 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3503 &AVAIL_OUT (e->dest)->values);
3504 bitmap_clear (&hoistable_set.values);
3506 /* Short-cut for a common case: availout_in_some is empty. */
3507 if (bitmap_empty_p (&availout_in_some))
3508 return false;
3510 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3511 hoistable_set.values = availout_in_some;
3512 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3514 /* Now finally construct the topological-ordered expression set. */
3515 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3517 bitmap_clear (&hoistable_set.values);
3519 /* If there are candidate values for hoisting, insert expressions
3520 strategically to make the hoistable expressions fully redundant. */
3521 pre_expr expr;
3522 FOR_EACH_VEC_ELT (exprs, i, expr)
3524 /* While we try to sort expressions topologically above, the
3525 sorting doesn't work out perfectly. Catch expressions we
3526 already inserted. */
3527 unsigned int value_id = get_expr_value_id (expr);
3528 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3530 if (dump_file && (dump_flags & TDF_DETAILS))
3532 fprintf (dump_file,
3533 "Already inserted expression for ");
3534 print_pre_expr (dump_file, expr);
3535 fprintf (dump_file, " (%04d)\n", value_id);
3537 continue;
3540 /* OK, we should hoist this value. Perform the transformation. */
3541 pre_stats.hoist_insert++;
3542 if (dump_file && (dump_flags & TDF_DETAILS))
3544 fprintf (dump_file,
3545 "Inserting expression in block %d for code hoisting: ",
3546 block->index);
3547 print_pre_expr (dump_file, expr);
3548 fprintf (dump_file, " (%04d)\n", value_id);
3551 gimple_seq stmts = NULL;
3552 tree res = create_expression_by_pieces (block, expr, &stmts,
3553 get_expr_type (expr));
3555 /* Do not return true if expression creation ultimately
3556 did not insert any statements. */
3557 if (gimple_seq_empty_p (stmts))
3558 res = NULL_TREE;
3559 else
3561 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3562 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3563 else
3564 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3567 /* Make sure to not return true if expression creation ultimately
3568 failed but also make sure to insert any stmts produced as they
3569 are tracked in inserted_exprs. */
3570 if (! res)
3571 continue;
3573 new_stuff = true;
3576 exprs.release ();
3578 return new_stuff;
3581 /* Do a dominator walk on the control flow graph, and insert computations
3582 of values as necessary for PRE and hoisting. */
3584 static bool
3585 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3587 basic_block son;
3588 bool new_stuff = false;
3590 if (block)
3592 basic_block dom;
3593 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3594 if (dom)
3596 unsigned i;
3597 bitmap_iterator bi;
3598 bitmap_set_t newset;
3600 /* First, update the AVAIL_OUT set with anything we may have
3601 inserted higher up in the dominator tree. */
3602 newset = NEW_SETS (dom);
3603 if (newset)
3605 /* Note that we need to value_replace both NEW_SETS and
3606 AVAIL_OUT. In both sets, the value may currently be
3607 represented by some non-simple expression that we
3608 want to replace. */
3609 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3611 pre_expr expr = expression_for_id (i);
3612 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3613 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3617 /* Insert expressions for partial redundancies. */
3618 if (do_pre && !single_pred_p (block))
3620 new_stuff |= do_pre_regular_insertion (block, dom);
3621 if (do_partial_partial)
3622 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3625 /* Insert expressions for hoisting. */
3626 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3627 new_stuff |= do_hoist_insertion (block);
3630 for (son = first_dom_son (CDI_DOMINATORS, block);
3631 son;
3632 son = next_dom_son (CDI_DOMINATORS, son))
3634 new_stuff |= insert_aux (son, do_pre, do_hoist);
3637 return new_stuff;
3640 /* Perform insertion of partially redundant and hoistable values. */
3642 static void
3643 insert (void)
3645 bool new_stuff = true;
3646 basic_block bb;
3647 int num_iterations = 0;
3649 FOR_ALL_BB_FN (bb, cfun)
3650 NEW_SETS (bb) = bitmap_set_new ();
3652 while (new_stuff)
3654 num_iterations++;
3655 if (dump_file && (dump_flags & TDF_DETAILS))
3656 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3657 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3658 flag_code_hoisting);
3660 /* Clear the NEW sets before the next iteration. We have already
3661 fully propagated their contents. */
3662 if (new_stuff)
3663 FOR_ALL_BB_FN (bb, cfun)
3664 bitmap_set_free (NEW_SETS (bb));
3666 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3670 /* Compute the AVAIL set for all basic blocks.
3672 This function performs value numbering of the statements in each basic
3673 block. The AVAIL sets are built from information we glean while doing
3674 this value numbering, since the AVAIL sets contain only one entry per
3675 value.
3677 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3678 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
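/* Illustrative instance of the equations above: with entry -> bb2 -> bb3
   and x_1 = a_1 + 1 computed in bb2, AVAIL_OUT (bb2) is AVAIL_OUT (entry)
   plus x_1, and AVAIL_OUT (bb3) starts as a copy of AVAIL_OUT (bb2).
   Because these are value sets, a later y_2 that value-numbers the same
   as x_1 does not add a second entry for that value.  */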
3680 static void
3681 compute_avail (void)
3684 basic_block block, son;
3685 basic_block *worklist;
3686 size_t sp = 0;
3687 unsigned i;
3688 tree name;
3690 /* We pretend that default definitions are defined in the entry block.
3691 This includes function arguments and the static chain decl. */
3692 FOR_EACH_SSA_NAME (i, name, cfun)
3694 pre_expr e;
3695 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3696 || has_zero_uses (name)
3697 || virtual_operand_p (name))
3698 continue;
3700 e = get_or_alloc_expr_for_name (name);
3701 add_to_value (get_expr_value_id (e), e);
3702 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3703 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3707 if (dump_file && (dump_flags & TDF_DETAILS))
3709 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3710 "tmp_gen", ENTRY_BLOCK);
3711 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3712 "avail_out", ENTRY_BLOCK);
3715 /* Allocate the worklist. */
3716 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3718 /* Seed the algorithm by putting the dominator children of the entry
3719 block on the worklist. */
3720 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3721 son;
3722 son = next_dom_son (CDI_DOMINATORS, son))
3723 worklist[sp++] = son;
3725 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3726 = ssa_default_def (cfun, gimple_vop (cfun));
3728 /* Loop until the worklist is empty. */
3729 while (sp)
3731 gimple *stmt;
3732 basic_block dom;
3734 /* Pick a block from the worklist. */
3735 block = worklist[--sp];
3737 /* Initially, the set of available values in BLOCK is that of
3738 its immediate dominator. */
3739 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3740 if (dom)
3742 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3743 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3746 /* Generate values for PHI nodes. */
3747 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3748 gsi_next (&gsi))
3750 tree result = gimple_phi_result (gsi.phi ());
3752 /* We have no need for virtual phis, as they don't represent
3753 actual computations. */
3754 if (virtual_operand_p (result))
3756 BB_LIVE_VOP_ON_EXIT (block) = result;
3757 continue;
3760 pre_expr e = get_or_alloc_expr_for_name (result);
3761 add_to_value (get_expr_value_id (e), e);
3762 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3763 bitmap_insert_into_set (PHI_GEN (block), e);
3766 BB_MAY_NOTRETURN (block) = 0;
      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);

          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
          if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return, or might have
                 side-effects that forbid hoisting possibly trapping
                 expressions before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }
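
          /* For example (illustrative only, names hypothetical):

               tmp_1 = foo (x_2);   // not const: may loop forever or exit ()
               y_3 = a_4 / b_5;     // may trap

             hoisting the possibly trapping division above the call could
             introduce a trap on a path where the original program never
             evaluated it, so the flag set above makes the VN_NARY handling
             below keep such expressions out of EXP_GEN.  */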

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_vdef (stmt))
            BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (stmt)
              || is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            {
              if (ssa_undefined_value_p (op))
                continue;
              pre_expr e = get_or_alloc_expr_for_name (op);
              bitmap_value_insert_into_set (EXP_GEN (block), e);
            }

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                vn_reference_s ref1;
                pre_expr result = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                    (gimple_vuse (stmt))) != block)
                  {
                    result = pre_expr_pool.allocate ();
                    result->kind = REFERENCE;
                    result->id = 0;
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }
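
            /* An illustrative sketch of the check above (hypothetical
               GIMPLE):

                 # .MEM_3 = VDEF <.MEM_2>
                 *p_1 = 1;
                 tmp_4 = pure_fn (p_1);   // vuse .MEM_3, defined just above

               the call's vuse is defined by a store in the same block, so
               the call's value may differ from the value the expression
               would have at block entry, and it is kept out of EXP_GEN.
               Were the vuse defined by a PHI or outside the block, the
               expression would be added.  */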

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return, avoid
                         adding the nary to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = pre_expr_pool.allocate ();
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }

                  case VN_REFERENCE:
                    {
                      tree rhs1 = gimple_assign_rhs1 (stmt);
                      alias_set_type set = get_alias_set (rhs1);
                      vec<vn_reference_op_s> operands
                        = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
                      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                                  TREE_TYPE (rhs1),
                                                  operands, &ref, VN_WALK);
                      if (!ref)
                        {
                          operands.release ();
                          continue;
                        }

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple *def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            {
                              operands.release ();
                              continue;
                            }
                        }

                      /* If the load was value-numbered to another
                         load, make sure we do not use its expression
                         for insertion if it wouldn't be a valid
                         replacement.  */
                      /* At the moment we have a testcase
                         for hoist insertion of aligned vs. misaligned
                         variants in gcc.dg/torture/pr65270-1.c, thus
                         with just alignment to be considered we can
                         simply replace the expression in the hashtable
                         with the most conservative one.  */
                      vn_reference_op_t ref1 = &ref->operands.last ();
                      while (ref1->opcode != TARGET_MEM_REF
                             && ref1->opcode != MEM_REF
                             && ref1 != &ref->operands[0])
                        --ref1;
                      vn_reference_op_t ref2 = &operands.last ();
                      while (ref2->opcode != TARGET_MEM_REF
                             && ref2->opcode != MEM_REF
                             && ref2 != &operands[0])
                        --ref2;
                      if ((ref1->opcode == TARGET_MEM_REF
                           || ref1->opcode == MEM_REF)
                          && (TYPE_ALIGN (ref1->type)
                              > TYPE_ALIGN (ref2->type)))
                        ref1->type
                          = build_aligned_type (ref1->type,
                                                TYPE_ALIGN (ref2->type));
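
                      /* Concretely (illustrative): if the load here uses
                         a type with TYPE_ALIGN of 8 while the hashtable
                         expression was recorded with a 32-bit aligned
                         type for the same value, the hashtable type is
                         weakened to an 8-bit aligned variant, so any
                         insertion based on the shared expression stays
                         valid for both occurrences.  */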
                      /* TBAA behavior is part of the value, so make sure
                         the hashtable expression covers this access as
                         well, by adjusting the ref's alias set and its
                         base.  */
                      if (ref->set == set
                          || alias_set_subset_of (set, ref->set))
                        ;
                      else if (alias_set_subset_of (ref->set, set))
                        {
                          ref->set = set;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (TREE_TYPE (ref2->op0),
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (TREE_TYPE (ref2->op2),
                                                  wi::to_wide (ref1->op2));
                        }
                      else
                        {
                          ref->set = 0;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op2));
                        }
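
                      /* For instance (illustrative): if this access's
                         alias set is a subset of the hashtable ref's,
                         the entry already covers it; if the hashtable's
                         set is the subset, it is widened to this
                         access's set; and if neither contains the other,
                         the ref conservatively gets alias set 0 and a
                         ptr_type_node-typed base so it conflicts with
                         everything.  */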
                      operands.release ();

                      result = pre_expr_pool.allocate ();
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}

/* Cheap DCE of a known set of possibly dead stmts.

   Because we don't follow the standard PRE algorithm exactly, sometimes
   decide not to insert PHI nodes, and because value numbering of casts
   isn't perfect, we sometimes end up inserting dead code.  This simple
   DCE-like pass removes any insertions we made that weren't actually
   used.  */
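
/* For instance (illustrative only): PRE may insert a computation on one
   arm of a diamond to make an expression fully redundant, but if
   elimination later rewrites all uses of the inserted name to another
   leader, the insertion is left with zero uses and is removed here.  */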

static void
remove_dead_inserted_code (void)
{
  /* ??? Re-use inserted_exprs as worklist not only as initial set.
     This may end up removing non-inserted code as well.  If we
     keep inserted_exprs unchanged we could restrict new worklist
     elements to members of inserted_exprs.  */
  bitmap worklist = inserted_exprs;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing unnecessary insertion:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();

  run_scc_vn (VN_WALK);

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not)
     leave virtual operands out of date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (inserted_exprs);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  clear_expression_ids ();

  scev_finalize ();
  remove_dead_inserted_code ();
  fini_pre ();
  loop_optimizer_finalize ();

  /* Restore SSA info before tail-merging as that resets it as well.  */
  scc_vn_restore_ssa_info ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}