/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "alloc-pool.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "fold-const.h"
36 #include "cfganal.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-ssa.h"
45 #include "cfgloop.h"
46 #include "tree-ssa-sccvn.h"
47 #include "tree-scalar-evolution.h"
48 #include "params.h"
49 #include "dbgcnt.h"
50 #include "domwalk.h"
51 #include "tree-ssa-propagate.h"
52 #include "tree-cfgcleanup.h"
53 #include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	does expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).  */
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining source of
      quadratic memory use in GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the TMP_GEN sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to insert a
   computation of the value of that expression in that block, we
   could do so.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
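/* As an illustrative sketch (not part of the original sources), the
   fragment below computes a + b on only one path into the join block,
   yet the expression is anticipatable in the join:

       if (cond_1)
	 x_2 = a_4 + b_5;
       <join>
       y_3 = a_4 + b_5;    <- partially redundant

   PRE inserts the computation on the path where it was missing and
   merges the results with a PHI node, making the join computation
   fully redundant so that elimination can replace it:

       if (cond_1)
	 x_2 = a_4 + b_5;
       else
	 pretmp_6 = a_4 + b_5;
       <join>
       t_7 = PHI <x_2, pretmp_6>
       y_3 = t_7;  */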
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it is also helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   A sketch of a block meeting all five conditions follows this comment.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shaped regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, fewer benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */
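/* Again as a sketch (not part of the original sources), consider a
   diamond whose head block B ends in a conditional and whose two arms
   both compute a_2 + b_3:

       <B>  if (cond_1)
	      x_5 = a_2 + b_3;
	    else
	      y_6 = a_2 + b_3;

   The value is in ANTIC_IN(B) but not in AVAIL_OUT(B), B dominates its
   two successors, and each successor has the value in its AVAIL_OUT,
   so all five conditions above are met.  Hoisting inserts the
   computation in B and makes both arm computations fully redundant:

       <B>  pretmp_4 = a_2 + b_3;
	    if (cond_1)
	      x_5 = pretmp_4;
	    else
	      y_6 = pretmp_4;  */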
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
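/* For example (an illustrative sketch), after value numbering

       a_1 = b_2 + 1;
       c_3 = b_2 + 1;

   a_1 and c_3 get the same value number, with a_1 as the
   representative SSA_NAME, and that value number is assigned one
   small value_id which is used as the bit index in the value bitmaps
   described below.  */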
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
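/* Continuing the sketch above: if the expression b_2 + 1 has
   expression id 5 and value id 3, a set containing it has bit 5 set
   in its expressions bitmap and bit 3 set in its values bitmap;
   value-based queries such as bitmap_set_contains_value consult only
   the values bitmap.  */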
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};
union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};
typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
316 /* Next global expression id number. */
317 static unsigned int next_expression_id;
319 /* Mapping from expression to id number we can use in bitmap sets. */
320 static vec<pre_expr> expressions;
321 static hash_table<pre_expr_d> *expression_to_id;
322 static vec<unsigned> name_to_id;
324 /* Allocate an expression id for EXPR. */
326 static inline unsigned int
327 alloc_expression_id (pre_expr expr)
329 struct pre_expr_d **slot;
330 /* Make sure we won't overflow. */
331 gcc_assert (next_expression_id + 1 > next_expression_id);
332 expr->id = next_expression_id++;
333 expressions.safe_push (expr);
334 if (expr->kind == NAME)
336 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
337 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
338 re-allocations by using vec::reserve upfront. */
339 unsigned old_len = name_to_id.length ();
340 name_to_id.reserve (num_ssa_names - old_len);
341 name_to_id.quick_grow_cleared (num_ssa_names);
342 gcc_assert (name_to_id[version] == 0);
343 name_to_id[version] = expr->id;
345 else
347 slot = expression_to_id->find_slot (expr, INSERT);
348 gcc_assert (!*slot);
349 *slot = expr;
351 return next_expression_id - 1;
/* Return the expression id for expression EXPR.  */
356 static inline unsigned int
357 get_expression_id (const pre_expr expr)
359 return expr->id;
362 static inline unsigned int
363 lookup_expression_id (const pre_expr expr)
365 struct pre_expr_d **slot;
367 if (expr->kind == NAME)
369 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
370 if (name_to_id.length () <= version)
371 return 0;
372 return name_to_id[version];
374 else
376 slot = expression_to_id->find_slot (expr, NO_INSERT);
377 if (!slot)
378 return 0;
379 return ((pre_expr)*slot)->id;
383 /* Return the existing expression id for EXPR, or create one if one
384 does not exist yet. */
386 static inline unsigned int
387 get_or_alloc_expression_id (pre_expr expr)
389 unsigned int id = lookup_expression_id (expr);
390 if (id == 0)
391 return alloc_expression_id (expr);
392 return expr->id = id;
/* Return the expression that has expression id ID.  */
397 static inline pre_expr
398 expression_for_id (unsigned int id)
400 return expressions[id];
403 /* Free the expression id field in all of our expressions,
404 and then destroy the expressions array. */
406 static void
407 clear_expression_ids (void)
409 expressions.release ();
412 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
414 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
416 static pre_expr
417 get_or_alloc_expr_for_name (tree name)
419 struct pre_expr_d expr;
420 pre_expr result;
421 unsigned int result_id;
423 expr.kind = NAME;
424 expr.id = 0;
425 PRE_EXPR_NAME (&expr) = name;
426 result_id = lookup_expression_id (&expr);
427 if (result_id != 0)
428 return expression_for_id (result_id);
430 result = pre_expr_pool.allocate ();
431 result->kind = NAME;
432 PRE_EXPR_NAME (result) = name;
433 alloc_expression_id (result);
434 return result;
437 /* An unordered bitmap set. One bitmap tracks values, the other,
438 expressions. */
439 typedef struct bitmap_set
441 bitmap_head expressions;
442 bitmap_head values;
443 } *bitmap_set_t;
445 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
446 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
448 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
449 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
451 /* Mapping from value id to expressions with that value_id. */
452 static vec<bitmap> value_expressions;
454 /* Sets that we need to keep track of. */
455 typedef struct bb_bitmap_sets
457 /* The EXP_GEN set, which represents expressions/values generated in
458 a basic block. */
459 bitmap_set_t exp_gen;
461 /* The PHI_GEN set, which represents PHI results generated in a
462 basic block. */
463 bitmap_set_t phi_gen;
465 /* The TMP_GEN set, which represents results/temporaries generated
466 in a basic block. IE the LHS of an expression. */
467 bitmap_set_t tmp_gen;
469 /* The AVAIL_OUT set, which represents which values are available in
470 a given basic block. */
471 bitmap_set_t avail_out;
473 /* The ANTIC_IN set, which represents which values are anticipatable
474 in a given basic block. */
475 bitmap_set_t antic_in;
477 /* The PA_IN set, which represents which values are
478 partially anticipatable in a given basic block. */
479 bitmap_set_t pa_in;
481 /* The NEW_SETS set, which is used during insertion to augment the
482 AVAIL_OUT set of blocks with the new insertions performed during
483 the current iteration. */
484 bitmap_set_t new_sets;
486 /* A cache for value_dies_in_block_x. */
487 bitmap expr_dies;
489 /* The live virtual operand on successor edges. */
490 tree vop_on_exit;
492 /* True if we have visited this block during ANTIC calculation. */
493 unsigned int visited : 1;
495 /* True when the block contains a call that might not return. */
496 unsigned int contains_may_not_return_call : 1;
497 } *bb_value_sets_t;
499 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
500 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
501 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
502 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
503 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
504 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
505 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
506 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
507 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
508 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
509 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
512 /* This structure is used to keep track of statistics on what
513 optimization PRE was able to perform. */
514 static struct
516 /* The number of new expressions/temporaries generated by PRE. */
517 int insertions;
519 /* The number of inserts found due to partial anticipation */
520 int pa_insert;
522 /* The number of inserts made for code hoisting. */
523 int hoist_insert;
525 /* The number of new PHI nodes added by PRE. */
526 int phis;
527 } pre_stats;
529 static bool do_partial_partial;
530 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
531 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
532 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
533 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
534 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
535 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
536 static bitmap_set_t bitmap_set_new (void);
537 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
538 tree);
539 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
540 static unsigned int get_expr_value_id (pre_expr);
542 /* We can add and remove elements and entries to and from sets
543 and hash tables, so we use alloc pools for them. */
545 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
546 static bitmap_obstack grand_bitmap_obstack;
548 /* A three tuple {e, pred, v} used to cache phi translations in the
549 phi_translate_table. */
551 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
553 /* The expression. */
554 pre_expr e;
556 /* The predecessor block along which we translated the expression. */
557 basic_block pred;
559 /* The value that resulted from the translation. */
560 pre_expr v;
562 /* The hashcode for the expression, pred pair. This is cached for
563 speed reasons. */
564 hashval_t hashcode;
566 /* hash_table support. */
567 static inline hashval_t hash (const expr_pred_trans_d *);
568 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
569 } *expr_pred_trans_t;
570 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
572 inline hashval_t
573 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
575 return e->hashcode;
578 inline int
579 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
580 const expr_pred_trans_d *ve2)
582 basic_block b1 = ve1->pred;
583 basic_block b2 = ve2->pred;
585 /* If they are not translations for the same basic block, they can't
586 be equal. */
587 if (b1 != b2)
588 return false;
589 return pre_expr_d::equal (ve1->e, ve2->e);
592 /* The phi_translate_table caches phi translations for a given
593 expression and predecessor. */
594 static hash_table<expr_pred_trans_d> *phi_translate_table;
596 /* Add the tuple mapping from {expression E, basic block PRED} to
597 the phi translation table and return whether it pre-existed. */
599 static inline bool
600 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
602 expr_pred_trans_t *slot;
603 expr_pred_trans_d tem;
604 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
605 pred->index);
606 tem.e = e;
607 tem.pred = pred;
608 tem.hashcode = hash;
609 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
610 if (*slot)
612 *entry = *slot;
613 return true;
616 *entry = *slot = XNEW (struct expr_pred_trans_d);
617 (*entry)->e = e;
618 (*entry)->pred = pred;
619 (*entry)->hashcode = hash;
620 return false;
624 /* Add expression E to the expression set of value id V. */
626 static void
627 add_to_value (unsigned int v, pre_expr e)
629 bitmap set;
631 gcc_checking_assert (get_expr_value_id (e) == v);
633 if (v >= value_expressions.length ())
635 value_expressions.safe_grow_cleared (v + 1);
638 set = value_expressions[v];
639 if (!set)
641 set = BITMAP_ALLOC (&grand_bitmap_obstack);
642 value_expressions[v] = set;
645 bitmap_set_bit (set, get_or_alloc_expression_id (e));
648 /* Create a new bitmap set and return it. */
650 static bitmap_set_t
651 bitmap_set_new (void)
653 bitmap_set_t ret = bitmap_set_pool.allocate ();
654 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
655 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
656 return ret;
659 /* Return the value id for a PRE expression EXPR. */
661 static unsigned int
662 get_expr_value_id (pre_expr expr)
664 unsigned int id;
665 switch (expr->kind)
667 case CONSTANT:
668 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
669 break;
670 case NAME:
671 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
672 break;
673 case NARY:
674 id = PRE_EXPR_NARY (expr)->value_id;
675 break;
676 case REFERENCE:
677 id = PRE_EXPR_REFERENCE (expr)->value_id;
678 break;
679 default:
680 gcc_unreachable ();
682 /* ??? We cannot assert that expr has a value-id (it can be 0), because
683 we assign value-ids only to expressions that have a result
684 in set_hashtable_value_ids. */
685 return id;
688 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
690 static tree
691 sccvn_valnum_from_value_id (unsigned int val)
693 bitmap_iterator bi;
694 unsigned int i;
695 bitmap exprset = value_expressions[val];
696 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
698 pre_expr vexpr = expression_for_id (i);
699 if (vexpr->kind == NAME)
700 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
701 else if (vexpr->kind == CONSTANT)
702 return PRE_EXPR_CONSTANT (vexpr);
704 return NULL_TREE;
707 /* Remove an expression EXPR from a bitmapped set. */
709 static void
710 bitmap_remove_expr_from_set (bitmap_set_t set, pre_expr expr)
712 unsigned int val = get_expr_value_id (expr);
713 bitmap_clear_bit (&set->values, val);
714 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
717 /* Insert an expression EXPR into a bitmapped set. */
719 static void
720 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
722 unsigned int val = get_expr_value_id (expr);
723 if (! value_id_constant_p (val))
725 /* Note this is the only function causing multiple expressions
726 for the same value to appear in a set. This is needed for
727 TMP_GEN, PHI_GEN and NEW_SETs. */
728 bitmap_set_bit (&set->values, val);
729 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
733 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
735 static void
736 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
738 bitmap_copy (&dest->expressions, &orig->expressions);
739 bitmap_copy (&dest->values, &orig->values);
743 /* Free memory used up by SET. */
744 static void
745 bitmap_set_free (bitmap_set_t set)
747 bitmap_clear (&set->expressions);
748 bitmap_clear (&set->values);
/* Generate a topologically ordered array of bitmap set SET.  */
754 static vec<pre_expr>
755 sorted_array_from_bitmap_set (bitmap_set_t set)
757 unsigned int i, j;
758 bitmap_iterator bi, bj;
759 vec<pre_expr> result;
761 /* Pre-allocate enough space for the array. */
762 result.create (bitmap_count_bits (&set->expressions));
764 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
776 bitmap exprset = value_expressions[i];
777 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
779 if (bitmap_bit_p (&set->expressions, j))
780 result.quick_push (expression_for_id (j));
784 return result;
787 /* Subtract all expressions contained in ORIG from DEST. */
789 static bitmap_set_t
790 bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
792 bitmap_set_t result = bitmap_set_new ();
793 bitmap_iterator bi;
794 unsigned int i;
796 bitmap_and_compl (&result->expressions, &dest->expressions,
797 &orig->expressions);
799 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
801 pre_expr expr = expression_for_id (i);
802 unsigned int value_id = get_expr_value_id (expr);
803 bitmap_set_bit (&result->values, value_id);
806 return result;
809 /* Subtract all values in bitmap set B from bitmap set A. */
811 static void
812 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
814 unsigned int i;
815 bitmap_iterator bi;
816 pre_expr to_remove = NULL;
817 FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
819 if (to_remove)
821 bitmap_remove_expr_from_set (a, to_remove);
822 to_remove = NULL;
824 pre_expr expr = expression_for_id (i);
825 if (bitmap_bit_p (&b->values, get_expr_value_id (expr)))
826 to_remove = expr;
828 if (to_remove)
829 bitmap_remove_expr_from_set (a, to_remove);
833 /* Return true if bitmapped set SET contains the value VALUE_ID. */
835 static bool
836 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
838 if (value_id_constant_p (value_id))
839 return true;
841 return bitmap_bit_p (&set->values, value_id);
844 static inline bool
845 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
847 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
850 /* Return true if two bitmap sets are equal. */
852 static bool
853 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
855 return bitmap_equal_p (&a->values, &b->values);
858 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
859 and add it otherwise. */
861 static void
862 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
864 unsigned int val = get_expr_value_id (expr);
865 if (value_id_constant_p (val))
866 return;
868 if (bitmap_set_contains_value (set, val))
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
879 unsigned int i;
880 bitmap_iterator bi;
881 bitmap exprset = value_expressions[val];
882 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
884 if (bitmap_clear_bit (&set->expressions, i))
886 bitmap_set_bit (&set->expressions, get_expression_id (expr));
887 return;
890 gcc_unreachable ();
892 else
893 bitmap_insert_into_set (set, expr);
896 /* Insert EXPR into SET if EXPR's value is not already present in
897 SET. */
899 static void
900 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
902 unsigned int val = get_expr_value_id (expr);
904 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
906 /* Constant values are always considered to be part of the set. */
907 if (value_id_constant_p (val))
908 return;
910 /* If the value membership changed, add the expression. */
911 if (bitmap_set_bit (&set->values, val))
912 bitmap_set_bit (&set->expressions, expr->id);
/* Print out EXPR to OUTFILE.  */
917 static void
918 print_pre_expr (FILE *outfile, const pre_expr expr)
920 if (! expr)
922 fprintf (outfile, "NULL");
923 return;
925 switch (expr->kind)
927 case CONSTANT:
928 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
929 break;
930 case NAME:
931 print_generic_expr (outfile, PRE_EXPR_NAME (expr));
932 break;
933 case NARY:
935 unsigned int i;
936 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
937 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
938 for (i = 0; i < nary->length; i++)
940 print_generic_expr (outfile, nary->op[i]);
941 if (i != (unsigned) nary->length - 1)
942 fprintf (outfile, ",");
944 fprintf (outfile, "}");
946 break;
948 case REFERENCE:
950 vn_reference_op_t vro;
951 unsigned int i;
952 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
953 fprintf (outfile, "{");
954 for (i = 0;
955 ref->operands.iterate (i, &vro);
956 i++)
958 bool closebrace = false;
959 if (vro->opcode != SSA_NAME
960 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
962 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
963 if (vro->op0)
965 fprintf (outfile, "<");
966 closebrace = true;
969 if (vro->op0)
971 print_generic_expr (outfile, vro->op0);
972 if (vro->op1)
974 fprintf (outfile, ",");
975 print_generic_expr (outfile, vro->op1);
977 if (vro->op2)
979 fprintf (outfile, ",");
980 print_generic_expr (outfile, vro->op2);
983 if (closebrace)
984 fprintf (outfile, ">");
985 if (i != ref->operands.length () - 1)
986 fprintf (outfile, ",");
988 fprintf (outfile, "}");
989 if (ref->vuse)
991 fprintf (outfile, "@");
992 print_generic_expr (outfile, ref->vuse);
995 break;
998 void debug_pre_expr (pre_expr);
1000 /* Like print_pre_expr but always prints to stderr. */
1001 DEBUG_FUNCTION void
1002 debug_pre_expr (pre_expr e)
1004 print_pre_expr (stderr, e);
1005 fprintf (stderr, "\n");
1008 /* Print out SET to OUTFILE. */
1010 static void
1011 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1012 const char *setname, int blockindex)
1014 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1015 if (set)
1017 bool first = true;
1018 unsigned i;
1019 bitmap_iterator bi;
1021 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1023 const pre_expr expr = expression_for_id (i);
1025 if (!first)
1026 fprintf (outfile, ", ");
1027 first = false;
1028 print_pre_expr (outfile, expr);
1030 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1033 fprintf (outfile, " }\n");
1036 void debug_bitmap_set (bitmap_set_t);
1038 DEBUG_FUNCTION void
1039 debug_bitmap_set (bitmap_set_t set)
1041 print_bitmap_set (stderr, set, "debug", 0);
1044 void debug_bitmap_sets_for (basic_block);
1046 DEBUG_FUNCTION void
1047 debug_bitmap_sets_for (basic_block bb)
1049 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1050 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1051 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1052 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1053 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1054 if (do_partial_partial)
1055 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1056 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1059 /* Print out the expressions that have VAL to OUTFILE. */
1061 static void
1062 print_value_expressions (FILE *outfile, unsigned int val)
1064 bitmap set = value_expressions[val];
1065 if (set)
1067 bitmap_set x;
1068 char s[10];
1069 sprintf (s, "%04d", val);
1070 x.expressions = *set;
1071 print_bitmap_set (outfile, &x, s, 0);
1076 DEBUG_FUNCTION void
1077 debug_value_expressions (unsigned int val)
1079 print_value_expressions (stderr, val);
1082 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1083 represent it. */
1085 static pre_expr
1086 get_or_alloc_expr_for_constant (tree constant)
1088 unsigned int result_id;
1089 unsigned int value_id;
1090 struct pre_expr_d expr;
1091 pre_expr newexpr;
1093 expr.kind = CONSTANT;
1094 PRE_EXPR_CONSTANT (&expr) = constant;
1095 result_id = lookup_expression_id (&expr);
1096 if (result_id != 0)
1097 return expression_for_id (result_id);
1099 newexpr = pre_expr_pool.allocate ();
1100 newexpr->kind = CONSTANT;
1101 PRE_EXPR_CONSTANT (newexpr) = constant;
1102 alloc_expression_id (newexpr);
1103 value_id = get_or_alloc_constant_value_id (constant);
1104 add_to_value (value_id, newexpr);
1105 return newexpr;
1108 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1109 Currently only supports constants and SSA_NAMES. */
1110 static pre_expr
1111 get_or_alloc_expr_for (tree t)
1113 if (TREE_CODE (t) == SSA_NAME)
1114 return get_or_alloc_expr_for_name (t);
1115 else if (is_gimple_min_invariant (t))
1116 return get_or_alloc_expr_for_constant (t);
1117 gcc_unreachable ();
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */
1123 static pre_expr
1124 fully_constant_expression (pre_expr e)
1126 switch (e->kind)
1128 case CONSTANT:
1129 return e;
1130 case NARY:
1132 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1133 tree res = vn_nary_simplify (nary);
1134 if (!res)
1135 return e;
1136 if (is_gimple_min_invariant (res))
1137 return get_or_alloc_expr_for_constant (res);
1138 if (TREE_CODE (res) == SSA_NAME)
1139 return get_or_alloc_expr_for_name (res);
1140 return e;
1142 case REFERENCE:
1144 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1145 tree folded;
1146 if ((folded = fully_constant_vn_reference_p (ref)))
1147 return get_or_alloc_expr_for_constant (folded);
1148 return e;
1150 default:
1151 return e;
1153 return e;
1156 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1157 it has the value it would have in BLOCK. Set *SAME_VALID to true
1158 in case the new vuse doesn't change the value id of the OPERANDS. */
1160 static tree
1161 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1162 alias_set_type set, tree type, tree vuse,
1163 basic_block phiblock,
1164 basic_block block, bool *same_valid)
1166 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1167 ao_ref ref;
1168 edge e = NULL;
1169 bool use_oracle;
1171 *same_valid = true;
1173 if (gimple_bb (phi) != phiblock)
1174 return vuse;
1176 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1178 /* Use the alias-oracle to find either the PHI node in this block,
1179 the first VUSE used in this block that is equivalent to vuse or
1180 the first VUSE which definition in this block kills the value. */
1181 if (gimple_code (phi) == GIMPLE_PHI)
1182 e = find_edge (block, phiblock);
1183 else if (use_oracle)
1184 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1186 vuse = gimple_vuse (phi);
1187 phi = SSA_NAME_DEF_STMT (vuse);
1188 if (gimple_bb (phi) != phiblock)
1189 return vuse;
1190 if (gimple_code (phi) == GIMPLE_PHI)
1192 e = find_edge (block, phiblock);
1193 break;
1196 else
1197 return NULL_TREE;
1199 if (e)
1201 if (use_oracle)
1203 bitmap visited = NULL;
1204 unsigned int cnt;
1205 /* Try to find a vuse that dominates this phi node by skipping
1206 non-clobbering statements. */
1207 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1208 NULL, NULL);
1209 if (visited)
1210 BITMAP_FREE (visited);
1212 else
1213 vuse = NULL_TREE;
1214 if (!vuse)
1216 /* If we didn't find any, the value ID can't stay the same,
1217 but return the translated vuse. */
1218 *same_valid = false;
1219 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1221 /* ??? We would like to return vuse here as this is the canonical
1222 upmost vdef that this reference is associated with. But during
1223 insertion of the references into the hash tables we only ever
1224 directly insert with their direct gimple_vuse, hence returning
1225 something else would make us not find the other expression. */
1226 return PHI_ARG_DEF (phi, e->dest_idx);
1229 return NULL_TREE;
1232 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1233 SET2 *or* SET3. This is used to avoid making a set consisting of the union
1234 of PA_IN and ANTIC_IN during insert and phi-translation. */
1236 static inline pre_expr
1237 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
1238 bitmap_set_t set3 = NULL)
1240 pre_expr result;
1242 result = bitmap_find_leader (set1, val);
1243 if (!result && set2)
1244 result = bitmap_find_leader (set2, val);
1245 if (!result && set3)
1246 result = bitmap_find_leader (set3, val);
1247 return result;
1250 /* Get the tree type for our PRE expression e. */
1252 static tree
1253 get_expr_type (const pre_expr e)
1255 switch (e->kind)
1257 case NAME:
1258 return TREE_TYPE (PRE_EXPR_NAME (e));
1259 case CONSTANT:
1260 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1261 case REFERENCE:
1262 return PRE_EXPR_REFERENCE (e)->type;
1263 case NARY:
1264 return PRE_EXPR_NARY (e)->type;
1266 gcc_unreachable ();
1269 /* Get a representative SSA_NAME for a given expression.
1270 Since all of our sub-expressions are treated as values, we require
1271 them to be SSA_NAME's for simplicity.
1272 Prior versions of GVNPRE used to use "value handles" here, so that
1273 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1274 either case, the operands are really values (IE we do not expect
1275 them to be usable without finding leaders). */
1277 static tree
1278 get_representative_for (const pre_expr e)
1280 tree name;
1281 unsigned int value_id = get_expr_value_id (e);
1283 switch (e->kind)
1285 case NAME:
1286 return VN_INFO (PRE_EXPR_NAME (e))->valnum;
1287 case CONSTANT:
1288 return PRE_EXPR_CONSTANT (e);
1289 case NARY:
1290 case REFERENCE:
1292 /* Go through all of the expressions representing this value
1293 and pick out an SSA_NAME. */
1294 unsigned int i;
1295 bitmap_iterator bi;
1296 bitmap exprs = value_expressions[value_id];
1297 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1299 pre_expr rep = expression_for_id (i);
1300 if (rep->kind == NAME)
1301 return VN_INFO (PRE_EXPR_NAME (rep))->valnum;
1302 else if (rep->kind == CONSTANT)
1303 return PRE_EXPR_CONSTANT (rep);
1306 break;
1309 /* If we reached here we couldn't find an SSA_NAME. This can
1310 happen when we've discovered a value that has never appeared in
1311 the program as set to an SSA_NAME, as the result of phi translation.
1312 Create one here.
1313 ??? We should be able to re-use this when we insert the statement
1314 to compute it. */
1315 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1316 VN_INFO_GET (name)->value_id = value_id;
1317 VN_INFO (name)->valnum = name;
1318 /* ??? For now mark this SSA name for release by SCCVN. */
1319 VN_INFO (name)->needs_insertion = true;
1320 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1321 if (dump_file && (dump_flags & TDF_DETAILS))
1323 fprintf (dump_file, "Created SSA_NAME representative ");
1324 print_generic_expr (dump_file, name);
1325 fprintf (dump_file, " for expression:");
1326 print_pre_expr (dump_file, e);
1327 fprintf (dump_file, " (%04d)\n", value_id);
1330 return name;
1335 static pre_expr
1336 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1337 basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
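/* For instance (an illustrative sketch), given a PHI node
   x_5 = PHI <a_3 (E1), c_4 (E2)> in PHIBLOCK, translating the NARY
   x_5 + 1 towards the predecessor reached through edge E2 yields
   c_4 + 1, re-using an existing value number for the result when one
   is known and allocating a fresh one otherwise.  */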
1343 static pre_expr
1344 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1345 basic_block pred, basic_block phiblock)
1347 switch (expr->kind)
1349 case NARY:
1351 unsigned int i;
1352 bool changed = false;
1353 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1354 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1355 sizeof_vn_nary_op (nary->length));
1356 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1358 for (i = 0; i < newnary->length; i++)
1360 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1361 continue;
1362 else
1364 pre_expr leader, result;
1365 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1366 leader = find_leader_in_sets (op_val_id, set1, set2);
1367 result = phi_translate (leader, set1, set2, pred, phiblock);
1368 if (result && result != leader)
1369 newnary->op[i] = get_representative_for (result);
1370 else if (!result)
1371 return NULL;
1373 changed |= newnary->op[i] != nary->op[i];
1376 if (changed)
1378 pre_expr constant;
1379 unsigned int new_val_id;
1381 PRE_EXPR_NARY (expr) = newnary;
1382 constant = fully_constant_expression (expr);
1383 PRE_EXPR_NARY (expr) = nary;
1384 if (constant != expr)
1386 /* For non-CONSTANTs we have to make sure we can eventually
1387 insert the expression. Which means we need to have a
1388 leader for it. */
1389 if (constant->kind != CONSTANT)
1391 /* Do not allow simplifications to non-constants over
1392 backedges as this will likely result in a loop PHI node
1393 to be inserted and increased register pressure.
1394 See PR77498 - this avoids doing predcoms work in
1395 a less efficient way. */
1396 if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
1398 else
1400 unsigned value_id = get_expr_value_id (constant);
1401 constant = find_leader_in_sets (value_id, set1, set2,
1402 AVAIL_OUT (pred));
1403 if (constant)
1404 return constant;
1407 else
1408 return constant;
1411 tree result = vn_nary_op_lookup_pieces (newnary->length,
1412 newnary->opcode,
1413 newnary->type,
1414 &newnary->op[0],
1415 &nary);
1416 if (result && is_gimple_min_invariant (result))
1417 return get_or_alloc_expr_for_constant (result);
1419 expr = pre_expr_pool.allocate ();
1420 expr->kind = NARY;
1421 expr->id = 0;
1422 if (nary)
1424 PRE_EXPR_NARY (expr) = nary;
1425 new_val_id = nary->value_id;
1426 get_or_alloc_expression_id (expr);
1427 /* When we end up re-using a value number make sure that
1428 doesn't have unrelated (which we can't check here)
1429 range or points-to info on it. */
1430 if (result
1431 && INTEGRAL_TYPE_P (TREE_TYPE (result))
1432 && SSA_NAME_RANGE_INFO (result)
1433 && ! SSA_NAME_IS_DEFAULT_DEF (result))
1435 if (! VN_INFO (result)->info.range_info)
1437 VN_INFO (result)->info.range_info
1438 = SSA_NAME_RANGE_INFO (result);
1439 VN_INFO (result)->range_info_anti_range_p
1440 = SSA_NAME_ANTI_RANGE_P (result);
1442 if (dump_file && (dump_flags & TDF_DETAILS))
1444 fprintf (dump_file, "clearing range info of ");
1445 print_generic_expr (dump_file, result);
1446 fprintf (dump_file, "\n");
1448 SSA_NAME_RANGE_INFO (result) = NULL;
1450 else if (result
1451 && POINTER_TYPE_P (TREE_TYPE (result))
1452 && SSA_NAME_PTR_INFO (result)
1453 && ! SSA_NAME_IS_DEFAULT_DEF (result))
1455 if (! VN_INFO (result)->info.ptr_info)
1456 VN_INFO (result)->info.ptr_info
1457 = SSA_NAME_PTR_INFO (result);
1458 if (dump_file && (dump_flags & TDF_DETAILS))
1460 fprintf (dump_file, "clearing points-to info of ");
1461 print_generic_expr (dump_file, result);
1462 fprintf (dump_file, "\n");
1464 SSA_NAME_PTR_INFO (result) = NULL;
1467 else
1469 new_val_id = get_next_value_id ();
1470 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1471 nary = vn_nary_op_insert_pieces (newnary->length,
1472 newnary->opcode,
1473 newnary->type,
1474 &newnary->op[0],
1475 result, new_val_id);
1476 PRE_EXPR_NARY (expr) = nary;
1477 get_or_alloc_expression_id (expr);
1479 add_to_value (new_val_id, expr);
1481 return expr;
1483 break;
1485 case REFERENCE:
1487 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1488 vec<vn_reference_op_s> operands = ref->operands;
1489 tree vuse = ref->vuse;
1490 tree newvuse = vuse;
1491 vec<vn_reference_op_s> newoperands = vNULL;
1492 bool changed = false, same_valid = true;
1493 unsigned int i, n;
1494 vn_reference_op_t operand;
1495 vn_reference_t newref;
1497 for (i = 0; operands.iterate (i, &operand); i++)
1499 pre_expr opresult;
1500 pre_expr leader;
1501 tree op[3];
1502 tree type = operand->type;
1503 vn_reference_op_s newop = *operand;
1504 op[0] = operand->op0;
1505 op[1] = operand->op1;
1506 op[2] = operand->op2;
1507 for (n = 0; n < 3; ++n)
1509 unsigned int op_val_id;
1510 if (!op[n])
1511 continue;
1512 if (TREE_CODE (op[n]) != SSA_NAME)
1514 /* We can't possibly insert these. */
1515 if (n != 0
1516 && !is_gimple_min_invariant (op[n]))
1517 break;
1518 continue;
1520 op_val_id = VN_INFO (op[n])->value_id;
1521 leader = find_leader_in_sets (op_val_id, set1, set2);
1522 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1523 if (opresult && opresult != leader)
1525 tree name = get_representative_for (opresult);
1526 changed |= name != op[n];
1527 op[n] = name;
1529 else if (!opresult)
1530 break;
1532 if (n != 3)
1534 newoperands.release ();
1535 return NULL;
1537 if (!changed)
1538 continue;
1539 if (!newoperands.exists ())
1540 newoperands = operands.copy ();
1541 /* We may have changed from an SSA_NAME to a constant */
1542 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1543 newop.opcode = TREE_CODE (op[0]);
1544 newop.type = type;
1545 newop.op0 = op[0];
1546 newop.op1 = op[1];
1547 newop.op2 = op[2];
1548 newoperands[i] = newop;
1550 gcc_checking_assert (i == operands.length ());
1552 if (vuse)
1554 newvuse = translate_vuse_through_block (newoperands.exists ()
1555 ? newoperands : operands,
1556 ref->set, ref->type,
1557 vuse, phiblock, pred,
1558 &same_valid);
1559 if (newvuse == NULL_TREE)
1561 newoperands.release ();
1562 return NULL;
1566 if (changed || newvuse != vuse)
1568 unsigned int new_val_id;
1570 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1571 ref->type,
1572 newoperands.exists ()
1573 ? newoperands : operands,
1574 &newref, VN_WALK);
1575 if (result)
1576 newoperands.release ();
1578 /* We can always insert constants, so if we have a partial
1579 redundant constant load of another type try to translate it
1580 to a constant of appropriate type. */
1581 if (result && is_gimple_min_invariant (result))
1583 tree tem = result;
1584 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1586 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1587 if (tem && !is_gimple_min_invariant (tem))
1588 tem = NULL_TREE;
1590 if (tem)
1591 return get_or_alloc_expr_for_constant (tem);
1594 /* If we'd have to convert things we would need to validate
1595 if we can insert the translated expression. So fail
1596 here for now - we cannot insert an alias with a different
1597 type in the VN tables either, as that would assert. */
1598 if (result
1599 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1600 return NULL;
1601 else if (!result && newref
1602 && !useless_type_conversion_p (ref->type, newref->type))
1604 newoperands.release ();
1605 return NULL;
1608 expr = pre_expr_pool.allocate ();
1609 expr->kind = REFERENCE;
1610 expr->id = 0;
1612 if (newref)
1613 new_val_id = newref->value_id;
1614 else
1616 if (changed || !same_valid)
1618 new_val_id = get_next_value_id ();
1619 value_expressions.safe_grow_cleared
1620 (get_max_value_id () + 1);
1622 else
1623 new_val_id = ref->value_id;
1624 if (!newoperands.exists ())
1625 newoperands = operands.copy ();
1626 newref = vn_reference_insert_pieces (newvuse, ref->set,
1627 ref->type,
1628 newoperands,
1629 result, new_val_id);
1630 newoperands = vNULL;
1632 PRE_EXPR_REFERENCE (expr) = newref;
1633 get_or_alloc_expression_id (expr);
1634 add_to_value (new_val_id, expr);
1636 newoperands.release ();
1637 return expr;
1639 break;
1641 case NAME:
1643 tree name = PRE_EXPR_NAME (expr);
1644 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1645 /* If the SSA name is defined by a PHI node in this block,
1646 translate it. */
1647 if (gimple_code (def_stmt) == GIMPLE_PHI
1648 && gimple_bb (def_stmt) == phiblock)
1650 edge e = find_edge (pred, gimple_bb (def_stmt));
1651 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1653 /* Handle constant. */
1654 if (is_gimple_min_invariant (def))
1655 return get_or_alloc_expr_for_constant (def);
1657 return get_or_alloc_expr_for_name (def);
      /* Otherwise return it unchanged - it will get removed if its
	 value is not available in PRED's AVAIL_OUT set of expressions
	 by the subtraction of TMP_GEN.  */
1662 return expr;
1665 default:
1666 gcc_unreachable ();
1670 /* Wrapper around phi_translate_1 providing caching functionality. */
1672 static pre_expr
1673 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1674 basic_block pred, basic_block phiblock)
1676 expr_pred_trans_t slot = NULL;
1677 pre_expr phitrans;
1679 if (!expr)
1680 return NULL;
1682 /* Constants contain no values that need translation. */
1683 if (expr->kind == CONSTANT)
1684 return expr;
1686 if (value_id_constant_p (get_expr_value_id (expr)))
1687 return expr;
1689 /* Don't add translations of NAMEs as those are cheap to translate. */
1690 if (expr->kind != NAME)
1692 if (phi_trans_add (&slot, expr, pred))
1693 return slot->v;
1694 /* Store NULL for the value we want to return in the case of
1695 recursing. */
1696 slot->v = NULL;
1699 /* Translate. */
1700 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1702 if (slot)
1704 if (phitrans)
1705 slot->v = phitrans;
1706 else
1707 /* Remove failed translations again, they cause insert
1708 iteration to not pick up new opportunities reliably. */
1709 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1712 return phitrans;
1716 /* For each expression in SET, translate the values through phi nodes
1717 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1718 expressions in DEST. */
1720 static void
1721 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1722 basic_block phiblock)
1724 vec<pre_expr> exprs;
1725 pre_expr expr;
1726 int i;
1728 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1730 bitmap_set_copy (dest, set);
1731 return;
1734 exprs = sorted_array_from_bitmap_set (set);
1735 FOR_EACH_VEC_ELT (exprs, i, expr)
1737 pre_expr translated;
1738 translated = phi_translate (expr, set, NULL, pred, phiblock);
1739 if (!translated)
1740 continue;
1742 /* We might end up with multiple expressions from SET being
1743 translated to the same value. In this case we do not want
1744 to retain the NARY or REFERENCE expression but prefer a NAME
1745 which would be the leader. */
1746 if (translated->kind == NAME)
1747 bitmap_value_replace_in_set (dest, translated);
1748 else
1749 bitmap_value_insert_into_set (dest, translated);
1751 exprs.release ();
1754 /* Find the leader for a value (i.e., the name representing that
1755 value) in a given set, and return it. Return NULL if no leader
1756 is found. */
1758 static pre_expr
1759 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1761 if (value_id_constant_p (val))
1763 unsigned int i;
1764 bitmap_iterator bi;
1765 bitmap exprset = value_expressions[val];
1767 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1769 pre_expr expr = expression_for_id (i);
1770 if (expr->kind == CONSTANT)
1771 return expr;
1774 if (bitmap_set_contains_value (set, val))
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
1787 unsigned int i;
1788 bitmap_iterator bi;
1789 bitmap exprset = value_expressions[val];
1791 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1792 return expression_for_id (i);
1794 return NULL;
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */
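/* For instance (a sketch), a load {*p_1, VUSE} dies in BLOCK if BLOCK
   contains a store *q_2 = ... that may alias *p_1; it stays live if
   the only memory statements in BLOCK are loads using the same VUSE.  */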
1804 static bool
1805 value_dies_in_block_x (pre_expr expr, basic_block block)
1807 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1808 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1809 gimple *def;
1810 gimple_stmt_iterator gsi;
1811 unsigned id = get_expression_id (expr);
1812 bool res = false;
1813 ao_ref ref;
1815 if (!vuse)
1816 return false;
1818 /* Lookup a previously calculated result. */
1819 if (EXPR_DIES (block)
1820 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1821 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, walking statements from the
     top of the basic block, a statement uses VUSE, there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
1828 ref.base = NULL_TREE;
1829 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1831 tree def_vuse, def_vdef;
1832 def = gsi_stmt (gsi);
1833 def_vuse = gimple_vuse (def);
1834 def_vdef = gimple_vdef (def);
1836 /* Not a memory statement. */
1837 if (!def_vuse)
1838 continue;
1840 /* Not a may-def. */
1841 if (!def_vdef)
1843 /* A load with the same VUSE, we're done. */
1844 if (def_vuse == vuse)
1845 break;
1847 continue;
1850 /* Init ref only if we really need it. */
1851 if (ref.base == NULL_TREE
1852 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1853 refx->operands))
1855 res = true;
1856 break;
1858 /* If the statement may clobber expr, it dies. */
1859 if (stmt_may_clobber_ref_p_1 (def, &ref))
1861 res = true;
1862 break;
1866 /* Remember the result. */
1867 if (!EXPR_DIES (block))
1868 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1869 bitmap_set_bit (EXPR_DIES (block), id * 2);
1870 if (res)
1871 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1873 return res;
1877 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1878 contains its value-id. */
1880 static bool
1881 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1883 if (op && TREE_CODE (op) == SSA_NAME)
1885 unsigned int value_id = VN_INFO (op)->value_id;
1886 if (!(bitmap_set_contains_value (set1, value_id)
1887 || (set2 && bitmap_set_contains_value (set2, value_id))))
1888 return false;
1890 return true;
1893 /* Determine if the expression EXPR is valid in SET1 U SET2.
1894 ONLY SET2 CAN BE NULL.
1895 This means that we have a leader for each part of the expression
1896 (if it consists of values), or the expression is an SSA_NAME.
1897 For loads/calls, we also see if the vuse is killed in this block. */
1899 static bool
1900 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1902 switch (expr->kind)
1904 case NAME:
1905 /* By construction all NAMEs are available. Non-available
1906 NAMEs are removed by subtracting TMP_GEN from the sets. */
1907 return true;
1908 case NARY:
1910 unsigned int i;
1911 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1912 for (i = 0; i < nary->length; i++)
1913 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1914 return false;
1915 return true;
1917 break;
1918 case REFERENCE:
1920 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1921 vn_reference_op_t vro;
1922 unsigned int i;
1924 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1926 if (!op_valid_in_sets (set1, set2, vro->op0)
1927 || !op_valid_in_sets (set1, set2, vro->op1)
1928 || !op_valid_in_sets (set1, set2, vro->op2))
1929 return false;
1931 return true;
1933 default:
1934 gcc_unreachable ();

/* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
   This means expressions that are made up of values we have no leaders for
   in SET1 or SET2.  */

static void
clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	bitmap_remove_expr_from_set (set1, expr);
    }
  exprs.release ();
}
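
/* Usage note (illustrative, no new behavior): thanks to the default
   argument for SET2, clean () is called in two forms below, matching
   the SET1 U SET2 semantics of valid_in_sets above:

     clean (ANTIC_IN (block));                    leaders from one set
     clean (PA_IN (block), ANTIC_IN (block));     leaders from the union  */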

/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;
  pre_expr to_remove = NULL;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      /* Remove queued expr.  */
      if (to_remove)
	{
	  bitmap_remove_expr_from_set (set, to_remove);
	  to_remove = NULL;
	}

      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  if (ref->vuse)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	      if (!gimple_nop_p (def_stmt)
		  && ((gimple_bb (def_stmt) != block
		       && !dominated_by_p (CDI_DOMINATORS,
					   block, gimple_bb (def_stmt)))
		      || (gimple_bb (def_stmt) == block
			  && value_dies_in_block_x (expr, block))))
		to_remove = expr;
	    }
	}
      else if (expr->kind == NARY)
	{
	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	  /* If the NARY may trap make sure the block does not contain
	     a possible exit point.
	     ??? This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_nary_may_trap (nary))
	    to_remove = expr;
	}
    }

  /* Remove queued expr.  */
  if (to_remove)
    bitmap_remove_expr_from_set (set, to_remove);
}
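
/* A note on the queued removal above (illustrative): the set iterator
   walks the underlying expression bitmap, so an expression may not be
   removed while the iterator is positioned on it.  Hence the
   one-element TO_REMOVE queue:

     if (to_remove)              <- flushed at the top of the next
       bitmap_remove_expr_from_set (set, to_remove);      iteration

   with a final flush after the loop for the last queued expression.  */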

static sbitmap has_abnormal_preds;

/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Note that clean() is deferred until after the iteration.  */
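
/* A worked instance of the equations above (illustrative; block numbers
   are made up).  For the diamond

	  bb2
	 /   \
       bb3   bb4
	 \   /
	  bb5        a + b in ANTIC_IN[bb5]

   bb3 and bb4 each have a single successor, so their ANTIC_OUT is
   phi_translate (ANTIC_IN[bb5]), while bb2, with two successors,
   intersects: ANTIC_OUT[bb2] = ANTIC_IN[bb3] n ANTIC_IN[bb4].  Thus
   a + b is anticipatable at bb2 only if it would be computed on every
   path leaving bb2.  */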

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  bool changed = ! BB_VISITED (block);
  BB_VISITED (block) = 1;
  old = ANTIC_OUT = S = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);
      gcc_assert (BB_VISITED (succ_bb));
      phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      size_t i;
      basic_block bprime, first = NULL;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e->dest;
	  else if (BB_VISITED (e->dest))
	    worklist.quick_push (e->dest);
	  else
	    {
	      /* Unvisited successors get their ANTIC_IN replaced by the
		 maximal set to arrive at a maximum ANTIC_IN solution.
		 We can ignore them in the intersection operation and thus
		 need not explicitly represent that maximum solution.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
			 e->src->index, e->dest->index);
	    }
	}

      /* Of multiple successors we have to have visited one already
	 which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);

      /* If we have multiple successors we need to intersect the ANTIC_OUT
	 sets.  For values that's a simple intersection but for
	 expressions it is a union.  Given we want to have a single
	 expression per value in our sets we have to canonicalize.
	 Avoid randomness and running into cycles like for PR82129 and
	 canonicalize the expression we choose to the one with the
	 lowest id.  This requires we actually compute the union first.  */
      FOR_EACH_VEC_ELT (worklist, i, bprime)
	{
	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
	      bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
	      bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
	      bitmap_set_free (tmp);
	    }
	  else
	    {
	      bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (bprime)->values);
	      bitmap_ior_into (&ANTIC_OUT->expressions,
			       &ANTIC_IN (bprime)->expressions);
	    }
	}
      if (! worklist.is_empty ())
	{
	  /* Prune expressions not in the value set, canonicalizing to
	     the expression with the lowest ID.  */
	  bitmap_iterator bi;
	  unsigned int i;
	  unsigned int to_clear = -1U;
	  bitmap seen_value = BITMAP_ALLOC (NULL);
	  FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
	    {
	      if (to_clear != -1U)
		{
		  bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
		  to_clear = -1U;
		}
	      pre_expr expr = expression_for_id (i);
	      unsigned int value_id = get_expr_value_id (expr);
	      if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)
		  || !bitmap_set_bit (seen_value, value_id))
		to_clear = i;
	    }
	  if (to_clear != -1U)
	    bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
	  BITMAP_FREE (seen_value);
	}
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
						      TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  /* clean (ANTIC_IN (block)) is deferred to after the iteration converged
     because it can cause non-convergence, see for example PR81181.  */

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    changed = true;

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (ANTIC_OUT)
	print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

      if (changed)
	fprintf (dump_file, "[changed] ");
      print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			block->index);

      if (S)
	print_bitmap_set (dump_file, S, "S", block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}

/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
*/
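
/* Contrast with the ANTIC equations above (illustrative): because PA_OUT
   uses a union rather than an intersection, in the diamond example an
   a + b computed on only one of the two paths out of bb2 is still in
   PA_OUT[bb2], i.e. partially anticipatable, even though it is not in
   ANTIC_OUT[bb2].  */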

static void
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      size_t i;
      basic_block bprime;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  worklist.quick_push (e->dest);
	}
      if (worklist.length () > 0)
	{
	  FOR_EACH_VEC_ELT (worklist, i, bprime)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  clean (PA_IN (block), ANTIC_IN (block));

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
}

/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;
  edge_iterator ei;
  edge e;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      BB_VISITED (block) = 0;

      FOR_EACH_EDGE (e, ei, block->preds)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    bitmap_set_bit (has_abnormal_preds, block->index);
	    break;
	  }

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      if (do_partial_partial)
	PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  /* For ANTIC computation we need a postorder that also guarantees that
     a block with a single successor is visited after its successor.
     RPO on the inverted CFG has this property.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);

  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (worklist);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_set_bit (worklist, e->src->index);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ??? We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder.length () - 1; i >= 0; i--)
	{
	  if (bitmap_bit_p (worklist, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
	      bitmap_clear_bit (worklist, block->index);
	      if (compute_antic_aux (block,
				     bitmap_bit_p (has_abnormal_preds,
						   block->index)))
		{
		  FOR_EACH_EDGE (e, ei, block->preds)
		    bitmap_set_bit (worklist, e->src->index);
		  changed = true;
		}
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  /* We have to clean after the dataflow problem converged as cleaning
     can cause non-convergence because it is based on expressions
     rather than values.  */
  FOR_EACH_BB_FN (block, cfun)
    clean (ANTIC_IN (block));

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      /* For partial antic we ignore backedges and thus we do not need
	 to perform any iteration when we process blocks in postorder.  */
      int postorder_num
	= pre_and_rev_post_order_compute (NULL, postorder.address (), false);
      for (i = postorder_num - 1; i >= 0; i--)
	{
	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
	  compute_partial_antic_aux (block,
				     bitmap_bit_p (has_abnormal_preds,
						   block->index));
	}
    }

  sbitmap_free (has_abnormal_preds);
}
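
/* A note on the iteration order above (illustrative): for a chain
   bb2 -> bb3 -> exit, RPO on the inverted CFG visits bb3 before bb2, so
   when compute_antic_aux processes bb2 the single-successor case can
   assert BB_VISITED (bb3) and translate a complete ANTIC_IN[bb3]
   instead of iterating on a stale set.  */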

/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;

/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &ref->operands[*operand];
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      gcc_unreachable ();

    case MEM_REF:
      {
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	tree offset = currop->op0;
	if (TREE_CODE (baseop) == ADDR_EXPR
	    && handled_component_p (TREE_OPERAND (baseop, 0)))
	  {
	    HOST_WIDE_INT off;
	    tree base;
	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
						  &off);
	    gcc_assert (base);
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     off));
	    baseop = build_fold_addr_expr (base);
	  }
	genop = build2 (MEM_REF, currop->type, baseop, offset);
	MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
	MR_DEPENDENCE_BASE (genop) = currop->base;
	REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
	return genop;
      }

    case TARGET_MEM_REF:
      {
	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
	vn_reference_op_t nextop = &ref->operands[++*operand];
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	if (currop->op0)
	  {
	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
	    if (!genop0)
	      return NULL_TREE;
	  }
	if (nextop->op0)
	  {
	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
	    if (!genop1)
	      return NULL_TREE;
	  }
	genop = build5 (TARGET_MEM_REF, currop->type,
			baseop, currop->op2, genop0, currop->op1, genop1);

	MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
	MR_DEPENDENCE_BASE (genop) = currop->base;
	return genop;
      }

    case ADDR_EXPR:
      if (currop->op0)
	{
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
	if (!genop1)
	  return NULL_TREE;
	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree op1 = currop->op0;
	tree op2 = currop->op1;
	tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
	REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
	return fold (t);
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	tree genop2 = currop->op1;
	tree genop3 = currop->op2;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts);
	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, genop1, stmts);
	if (!genop1)
	  return NULL_TREE;
	if (genop2)
	  {
	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
	    /* Drop zero minimum index if redundant.  */
	    if (integer_zerop (genop2)
		&& (!domain_type
		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
	      genop2 = NULL_TREE;
	    else
	      {
		genop2 = find_or_generate_expression (block, genop2, stmts);
		if (!genop2)
		  return NULL_TREE;
	      }
	  }
	if (genop3)
	  {
	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
	    /* We can't always put a size in units of the element alignment
	       here as the element alignment may be not visible.  See
	       PR43783.  Simply drop the element size for constant
	       sizes.  */
	    if (TREE_CODE (genop3) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
		&& wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
			     (wi::to_offset (genop3)
			      * vn_ref_op_align_unit (currop))))
	      genop3 = NULL_TREE;
	    else
	      {
		genop3 = find_or_generate_expression (block, genop3, stmts);
		if (!genop3)
		  return NULL_TREE;
	      }
	  }
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }
    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    genop2 = find_or_generate_expression (block, genop2, stmts);
	    if (!genop2)
	      return NULL_TREE;
	  }
	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
	genop = find_or_generate_expression (block, currop->op0, stmts);
	return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
    case FUNCTION_DECL:
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}

/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
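
/* A worked example (illustrative; the exact operand encoding shown is
   an assumption based on how create_component_ref_by_pieces_1 recurses
   above): for a value-numbered reference representing x.f, the operands
   vector is walked from the outermost piece inwards, roughly

     operands[0]  COMPONENT_REF   op0 = FIELD_DECL f
     operands[1]  VAR_DECL        op0 = x

   so the COMPONENT_REF case recurses for its base (yielding x) and then
   rebuilds the single access expression x.f with fold_build3, without
   creating temporaries for the intermediate pieces.  */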

/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
	return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs which eventually re-materialize
	 their operand values.  */
      if (temp->kind == NARY)
	return create_expression_by_pieces (block, temp, stmts,
					    get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (i.e. there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gassign *newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
	return folded;
      break;
    case CONSTANT:
      {
	folded = PRE_EXPR_CONSTANT (expr);
	tree tem = fold_convert (exprtype, folded);
	if (is_gimple_min_invariant (tem))
	  return tem;
	break;
      }
    case REFERENCE:
      if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  unsigned int operand = 1;
	  vn_reference_op_t currop = &ref->operands[0];
	  tree sc = NULL_TREE;
	  tree fn;
	  if (TREE_CODE (currop->op0) == FUNCTION_DECL)
	    fn = currop->op0;
	  else
	    fn = find_or_generate_expression (block, currop->op0, stmts);
	  if (!fn)
	    return NULL_TREE;
	  if (currop->op1)
	    {
	      sc = find_or_generate_expression (block, currop->op1, stmts);
	      if (!sc)
		return NULL_TREE;
	    }
	  auto_vec<tree> args (ref->operands.length () - 1);
	  while (operand < ref->operands.length ())
	    {
	      tree arg = create_component_ref_by_pieces_1 (block, ref,
							   &operand, stmts);
	      if (!arg)
		return NULL_TREE;
	      args.quick_push (arg);
	    }
	  gcall *call
	    = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
				      ? build_fold_addr_expr (fn) : fn), args);
	  gimple_call_set_with_bounds (call, currop->with_bounds);
	  if (sc)
	    gimple_call_set_chain (call, sc);
	  tree forcedname = make_ssa_name (currop->type);
	  gimple_call_set_lhs (call, forcedname);
	  gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
	  gimple_seq_add_stmt_without_update (&forced_stmts, call);
	  folded = forcedname;
	}
      else
	{
	  folded = create_component_ref_by_pieces (block,
						   PRE_EXPR_REFERENCE (expr),
						   stmts);
	  if (!folded)
	    return NULL_TREE;
	  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
	  newstmt = gimple_build_assign (name, folded);
	  gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
	  gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
	  folded = name;
	}
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	tree *genop = XALLOCAVEC (tree, nary->length);
	unsigned i;
	for (i = 0; i < nary->length; ++i)
	  {
	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
	    if (!genop[i])
	      return NULL_TREE;
	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
	       may have conversions stripped.  */
	    if (nary->opcode == POINTER_PLUS_EXPR)
	      {
		if (i == 0)
		  genop[i] = gimple_convert (&forced_stmts,
					     nary->type, genop[i]);
		else if (i == 1)
		  genop[i] = gimple_convert (&forced_stmts,
					     sizetype, genop[i]);
	      }
	    else
	      genop[i] = gimple_convert (&forced_stmts,
					 TREE_TYPE (nary->op[i]), genop[i]);
	  }
	if (nary->opcode == CONSTRUCTOR)
	  {
	    vec<constructor_elt, va_gc> *elts = NULL;
	    for (i = 0; i < nary->length; ++i)
	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
	    folded = build_constructor (nary->type, elts);
	    name = make_temp_ssa_name (exprtype, NULL, "pretmp");
	    newstmt = gimple_build_assign (name, folded);
	    gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
	    folded = name;
	  }
	else
	  {
	    switch (nary->length)
	      {
	      case 1:
		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
				       genop[0]);
		break;
	      case 2:
		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
				       genop[0], genop[1]);
		break;
	      case 3:
		folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
				       genop[0], genop[1], genop[2]);
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }

  folded = gimple_convert (&forced_stmts, exprtype, folded);

  /* If there is nothing to insert, return the simplified result.  */
  if (gimple_seq_empty_p (forced_stmts))
    return folded;
  /* If we simplified to a constant return it and discard any stmts
     we built.  */
  if (is_gimple_min_invariant (folded))
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  /* Likewise if we simplified to something not queued for insertion.  */
  bool found = false;
  gsi = gsi_last (forced_stmts);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree forcedname = gimple_get_lhs (stmt);
      if (forcedname == folded)
	{
	  found = true;
	  break;
	}
    }
  if (! found)
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  gcc_assert (TREE_CODE (folded) == SSA_NAME);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  if (forcedname != folded)
	    {
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }

	  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = folded;

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
      fprintf (dump_file, " in predecessor %d (%04d)\n",
	       block->index, value_id);
    }

  return name;
}

/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gphi *phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && expr->kind != REFERENCE)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];
      builtexpr = create_expression_by_pieces (bprime, eprime,
					       &stmts, type);
      gcc_assert (!(pred->flags & EDGE_ABNORMAL));
      if (!gimple_seq_empty_p (stmts))
	{
	  gsi_insert_seq_on_edge (pred, stmts);
	  insertions = true;
	}
      if (!builtexpr)
	{
	  /* We cannot insert a PHI node if we failed to insert
	     on one edge.  */
	  nophi = true;
	  continue;
	}
      if (is_gimple_min_invariant (builtexpr))
	avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
      else
	avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  VN_INFO_GET (temp)->value_id = val;
  VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
		     pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it does not exist in AVAIL_OUT, we want it
     inserted there.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  /* If we insert a PHI node for a conversion of another PHI node
     in the same basic-block try to preserve range information.
     This is important so that followup loop passes receive optimal
     number of iteration analysis results.  See PR61743.  */
  if (expr->kind == NARY
      && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
      && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
      && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
      && INTEGRAL_TYPE_P (type)
      && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
      && (TYPE_PRECISION (type)
	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
      && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
    {
      wide_int min, max;
      if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
	  && !wi::neg_p (min, SIGNED)
	  && !wi::neg_p (max, SIGNED))
	/* Just handle extension and sign-changes of all-positive ranges.  */
	set_range_info (temp,
			SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
			wide_int_storage::from (min, TYPE_PRECISION (type),
						TYPE_SIGN (type)),
			wide_int_storage::from (max, TYPE_PRECISION (type),
						TYPE_SIGN (type)));
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0);
      fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
    }
  pre_stats.phis++;
  return true;
}
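
/* A worked GIMPLE-level example of the insertion above (illustrative;
   SSA names are made up, but the "pretmp"/"prephitmp" temporaries match
   the names used by the code):

     if (cond)                  if (cond)
       t_1 = a + b;               t_1 = a + b;
     else              =>       else
       ;                          pretmp_2 = a + b;
     x_3 = a + b;               prephitmp_4 = PHI <t_1, pretmp_2>
                                x_3 = prephitmp_4;

   a + b is available on only one predecessor, so it is inserted on the
   other incoming edge and the two availabilities are merged by a PHI
   carrying the value number of the original expression, which makes the
   computation of x_3 fully redundant and removable by elimination.  */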

/* Perform insertion of partially redundant or hoistable values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   If the block has multiple successors,
       3a. Iterate over the ANTIC values for the block to see if
	   any of them are good candidates for hoisting.
       3b. If so, insert expressions computing the values in BLOCK,
	   and add the new expressions into the NEW_SETS set.
   4. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 4 are done by insert_aux.  2b, 2c and 2d are done by
   do_pre_regular_insertion and do_pre_partial_partial_insertion.  3a and 3b
   are done in do_hoist_insertion.  */

static bool
do_pre_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Found fully redundant value: ");
		  print_pre_expr (dump_file, expr);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      /* We are looking at ANTIC_OUT of bprime.  */
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime);
	      if (edoubleprime == NULL)
		{
		  avail[pred->dest_idx] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[pred->dest_idx] = edoubleprime;
		  by_some = true;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_d::equal (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some)
	    {
	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), no redundancy on to be "
			       "optimized for speed edge\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same)
	    {
	      gcc_assert (edoubleprime->kind == CONSTANT
			  || edoubleprime->kind == NAME);

	      tree temp = make_temp_ssa_name (get_expr_type (expr),
					      NULL, "pretmp");
	      gassign *assign
		= gimple_build_assign (temp,
				       edoubleprime->kind == CONSTANT ?
				       PRE_EXPR_CONSTANT (edoubleprime) :
				       PRE_EXPR_NAME (edoubleprime));
	      gimple_stmt_iterator gsi = gsi_after_labels (block);
	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

	      VN_INFO_GET (temp)->value_id = val;
	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
	      if (VN_INFO (temp)->valnum == NULL_TREE)
		VN_INFO (temp)->valnum = temp;
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
	      pre_expr newe = get_or_alloc_expr_for_name (temp);
	      add_to_value (val, newe);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
	      bitmap_insert_into_set (NEW_SETS (block), newe);
	    }
	}
    }

  exprs.release ();
  return new_stuff;
}

/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_pre_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
	      avail[pred->dest_idx] = edoubleprime;
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all)
	    {
	      edge succ;
	      bool do_insertion = false;

	      /* Insert only if we can remove a later expression on a path
		 that we want to optimize for speed.
		 The phi node that we will be inserting in BLOCK is not free,
		 and inserting it for the sake of !optimize_for_speed successor
		 may cause regressions on the speed path.  */
	      FOR_EACH_EDGE (succ, ei, block->succs)
		{
		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
		    {
		      if (optimize_edge_for_speed_p (succ))
			do_insertion = true;
		    }
		}

	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), not (partially) anticipated "
			       "on any to be optimized for speed edges\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  pre_stats.pa_insert++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	}
    }

  exprs.release ();
  return new_stuff;
}
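
/* An illustrative case for the above (a sketch, with made-up names):

     t_1 = a + b;
     do
       {
	 ...
	 x_2 = a + b;
       }
     while (cond);

   At the loop header a + b is fully available from both incoming edges
   (from t_1 before the loop and from x_2 along the backedge) but only
   partially anticipatable there.  Inserting at the header makes the
   in-loop computation removable, which is the "partially anticipatable
   and fully available" case handled here.  */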

/* Insert expressions in BLOCK to compute hoistable values up.
   Return TRUE if something was inserted, otherwise return FALSE.
   The caller has to make sure that BLOCK has at least two successors.  */

static bool
do_hoist_insertion (basic_block block)
{
  edge e;
  edge_iterator ei;
  bool new_stuff = false;
  unsigned i;
  gimple_stmt_iterator last;

  /* At least two successors, or else...  */
  gcc_assert (EDGE_COUNT (block->succs) >= 2);

  /* Check that all successors of BLOCK are dominated by block.
     We could use dominated_by_p () for this, but actually there is a much
     quicker check: any successor that is dominated by BLOCK can't have
     more than one predecessor edge.  */
  FOR_EACH_EDGE (e, ei, block->succs)
    if (! single_pred_p (e->dest))
      return false;

  /* Determine the insertion point.  If we cannot safely insert before
     the last stmt if we'd have to, bail out.  */
  last = gsi_last_bb (block);
  if (!gsi_end_p (last)
      && !is_ctrl_stmt (gsi_stmt (last))
      && stmt_ends_bb_p (gsi_stmt (last)))
    return false;

  /* Compute the set of hoistable expressions from ANTIC_IN.  First compute
     hoistable values.  */
  bitmap_set hoistable_set;

  /* A hoistable value must be in ANTIC_IN (block)
     but not in AVAIL_OUT (BLOCK).  */
  bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
  bitmap_and_compl (&hoistable_set.values,
		    &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);

  /* Short-cut for a common case: hoistable_set is empty.  */
  if (bitmap_empty_p (&hoistable_set.values))
    return false;

  /* Compute which of the hoistable values is in AVAIL_OUT of
     at least one of the successors of BLOCK.  */
  bitmap_head availout_in_some;
  bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
  FOR_EACH_EDGE (e, ei, block->succs)
    /* Do not consider expressions solely because of their availability
       on loop exits.  They'd be ANTIC-IN throughout the whole loop
       and thus effectively hoisted across loops by combination of
       PRE and hoisting.  */
    if (! loop_exit_edge_p (block->loop_father, e))
      bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
			   &AVAIL_OUT (e->dest)->values);
  bitmap_clear (&hoistable_set.values);

  /* Short-cut for a common case: availout_in_some is empty.  */
  if (bitmap_empty_p (&availout_in_some))
    return false;

  /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set.  */
  hoistable_set.values = availout_in_some;
  hoistable_set.expressions = ANTIC_IN (block)->expressions;

  /* Now finally construct the topological-ordered expression set.  */
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);

  bitmap_clear (&hoistable_set.values);

  /* If there are candidate values for hoisting, insert expressions
     strategically to make the hoistable expressions fully redundant.  */
  pre_expr expr;
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      /* While we try to sort expressions topologically above, the
	 sorting doesn't work out perfectly.  Catch expressions we
	 already inserted.  */
      unsigned int value_id = get_expr_value_id (expr);
      if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file,
		       "Already inserted expression for ");
	      print_pre_expr (dump_file, expr);
	      fprintf (dump_file, " (%04d)\n", value_id);
	    }
	  continue;
	}

      /* OK, we should hoist this value.  Perform the transformation.  */
      pre_stats.hoist_insert++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "Inserting expression in block %d for code hoisting: ",
		   block->index);
	  print_pre_expr (dump_file, expr);
	  fprintf (dump_file, " (%04d)\n", value_id);
	}

      gimple_seq stmts = NULL;
      tree res = create_expression_by_pieces (block, expr, &stmts,
					      get_expr_type (expr));

      /* Do not return true if expression creation ultimately
	 did not insert any statements.  */
      if (gimple_seq_empty_p (stmts))
	res = NULL_TREE;
      else
	{
	  if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
	    gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
	  else
	    gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
	}

      /* Make sure to not return true if expression creation ultimately
	 failed but also make sure to insert any stmts produced as they
	 are tracked in inserted_exprs.  */
      if (! res)
	continue;

      new_stuff = true;
    }

  exprs.release ();

  return new_stuff;
}
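
/* A worked example for hoisting (illustrative; names are made up):

     if (cond)                pretmp_1 = a + b;
       x_1 = a + b;    =>     if (cond)
     else                       x_1 = pretmp_1;
       y_2 = a + b;           else
                                y_2 = pretmp_1;

   a + b is in ANTIC_IN of the block ending in the condition (it is
   computed on every path leaving it) but not in that block's AVAIL_OUT,
   and after PRE it is in AVAIL_OUT of a successor, so it qualifies as
   hoistable and a single computation is inserted before the control
   statement.  */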

/* Do a dominator walk on the control flow graph, and insert computations
   of values as necessary for PRE and hoisting.  */

static bool
insert_aux (basic_block block, bool do_pre, bool do_hoist)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset;

	  /* First, update the AVAIL_OUT set with anything we may have
	     inserted higher up in the dominator tree.  */
	  newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT.  In both sets, the value may be represented by
		 some non-simple expression here that we want to replace
		 with the new leader.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }

	  /* Insert expressions for partial redundancies.  */
	  if (do_pre && !single_pred_p (block))
	    {
	      new_stuff |= do_pre_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_pre_partial_partial_insertion (block, dom);
	    }

	  /* Insert expressions for hoisting.  */
	  if (do_hoist && EDGE_COUNT (block->succs) >= 2)
	    new_stuff |= do_hoist_insertion (block);
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son, do_pre, do_hoist);
    }

  return new_stuff;
}

/* Perform insertion of partially redundant and hoistable values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB_FN (bb, cfun)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
      new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
			      flag_code_hoisting);

      /* Clear the NEW sets before the next iteration.  We have already
	 fully propagated their contents.  */
      if (new_stuff)
	FOR_ALL_BB_FN (bb, cfun)
	  bitmap_set_free (NEW_SETS (bb));
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}

/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
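
/* An illustrative instance of the equations above: since AVAIL_IN[BLOCK]
   is simply AVAIL_OUT of the immediate dominator, the dominator walk
   below copies AVAIL_OUT (dom) into each block and then adds PHI results
   and statement definitions as they are value numbered, e.g.

     bb2:  t_1 = a + b;     AVAIL_OUT[bb2] = AVAIL_OUT[dom(bb2)] U { t_1 }

   keeping one leader expression per value per block.  */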
3668 static void
3669 compute_avail (void)
3672 basic_block block, son;
3673 basic_block *worklist;
3674 size_t sp = 0;
3675 unsigned i;
3676 tree name;
3678 /* We pretend that default definitions are defined in the entry block.
3679 This includes function arguments and the static chain decl. */
3680 FOR_EACH_SSA_NAME (i, name, cfun)
3682 pre_expr e;
3683 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3684 || has_zero_uses (name)
3685 || virtual_operand_p (name))
3686 continue;
3688 e = get_or_alloc_expr_for_name (name);
3689 add_to_value (get_expr_value_id (e), e);
3690 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3691 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3695 if (dump_file && (dump_flags & TDF_DETAILS))
3697 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3698 "tmp_gen", ENTRY_BLOCK);
3699 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3700 "avail_out", ENTRY_BLOCK);
3703 /* Allocate the worklist. */
3704 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3706 /* Seed the algorithm by putting the dominator children of the entry
3707 block on the worklist. */
3708 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3709 son;
3710 son = next_dom_son (CDI_DOMINATORS, son))
3711 worklist[sp++] = son;
3713 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3714 = ssa_default_def (cfun, gimple_vop (cfun));
3716 /* Loop until the worklist is empty. */
3717 while (sp)
3719 gimple *stmt;
3720 basic_block dom;
3722 /* Pick a block from the worklist. */
3723 block = worklist[--sp];
3725 /* Initially, the set of available values in BLOCK is that of
3726 its immediate dominator. */
3727 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3728 if (dom)
3730 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3731 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3734 /* Generate values for PHI nodes. */
3735 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3736 gsi_next (&gsi))
3738 tree result = gimple_phi_result (gsi.phi ());
3740 /* We have no need for virtual phis, as they don't represent
3741 actual computations. */
3742 if (virtual_operand_p (result))
3744 BB_LIVE_VOP_ON_EXIT (block) = result;
3745 continue;
3748 pre_expr e = get_or_alloc_expr_for_name (result);
3749 add_to_value (get_expr_value_id (e), e);
3750 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3751 bitmap_insert_into_set (PHI_GEN (block), e);

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);

          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
          if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return, or have side-effects
                 that forbid hoisting possibly trapping expressions
                 before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }
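
          /* For illustration (hypothetical): once a block contains a
             call to a non-const function, the call might not return,
             so a later trapping expression such as a_1 / b_2 in the
             same block must not be exposed to hoisting across the
             call; this is what the BB_MAY_NOTRETURN check against
             vn_nary_may_trap below enforces.  */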

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_vdef (stmt))
            BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (stmt)
              || is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            {
              if (ssa_undefined_value_p (op))
                continue;
              pre_expr e = get_or_alloc_expr_for_name (op);
              bitmap_value_insert_into_set (EXP_GEN (block), e);
            }
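
          /* Taking the two operand walks above together on a
             (hypothetical) statement x_1 = a_2 + b_3: the def x_1 goes
             into TMP_GEN and AVAIL_OUT, the uses a_2 and b_3 go into
             EXP_GEN, and the expression a_2 + b_3 itself is added to
             EXP_GEN by the GIMPLE_ASSIGN case below.  */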

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                vn_reference_s ref1;
                pre_expr result = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                    (gimple_vuse (stmt))) != block)
                  {
                    result = pre_expr_pool.allocate ();
                    result->kind = REFERENCE;
                    result->id = 0;
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }
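
            /* For example (hypothetical): a pure call t_1 = strlen (s_2)
               whose VUSE is defined by a phi or outside this block is
               added to EXP_GEN above, since nothing between the block
               entry and the call can invalidate the memory the call
               reads.  */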

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return, avoid
                         adding the nary to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = pre_expr_pool.allocate ();
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }
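
                  /* E.g. a (hypothetical) x_1 = a_2 < b_3 ? c_4 : d_5
                     has a COND_EXPR on its RHS and is skipped above
                     rather than being entered into EXP_GEN.  */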

                  case VN_REFERENCE:
                    {
                      tree rhs1 = gimple_assign_rhs1 (stmt);
                      alias_set_type set = get_alias_set (rhs1);
                      vec<vn_reference_op_s> operands
                        = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
                      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                                  TREE_TYPE (rhs1),
                                                  operands, &ref, VN_WALK);
                      if (!ref)
                        {
                          operands.release ();
                          continue;
                        }

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple *def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            {
                              operands.release ();
                              continue;
                            }
                        }
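
                      /* The walk above ensures, e.g. (hypothetical),
                         that a load x_1 = *p_2 preceded in this block
                         by a store *q_3 = ... that may clobber *p_2
                         stays out of EXP_GEN, because its value is not
                         anticipable from the block entry.  */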

                      /* If the load was value-numbered to another
                         load make sure we do not use its expression
                         for insertion if it wouldn't be a valid
                         replacement.  */
                      /* At the moment we have a testcase
                         for hoist insertion of aligned vs. misaligned
                         variants in gcc.dg/torture/pr65270-1.c thus
                         with just alignment to be considered we can
                         simply replace the expression in the hashtable
                         with the most conservative one.  */
                      vn_reference_op_t ref1 = &ref->operands.last ();
                      while (ref1->opcode != TARGET_MEM_REF
                             && ref1->opcode != MEM_REF
                             && ref1 != &ref->operands[0])
                        --ref1;
                      vn_reference_op_t ref2 = &operands.last ();
                      while (ref2->opcode != TARGET_MEM_REF
                             && ref2->opcode != MEM_REF
                             && ref2 != &operands[0])
                        --ref2;
                      if ((ref1->opcode == TARGET_MEM_REF
                           || ref1->opcode == MEM_REF)
                          && (TYPE_ALIGN (ref1->type)
                              > TYPE_ALIGN (ref2->type)))
                        ref1->type
                          = build_aligned_type (ref1->type,
                                                TYPE_ALIGN (ref2->type));
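
                      /* Concretely (hypothetical): if the hashtable
                         entry was first recorded through a 32-byte
                         aligned type and this lookup sees the same
                         value through a 4-byte aligned type, the entry
                         is weakened to the lesser alignment, as
                         inserting the more-aligned variant for both
                         could fault on strict-alignment targets.  */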

                      /* TBAA behavior is an obvious part so make sure
                         that the hashtable one covers this as well
                         by adjusting the ref alias set and its base.  */
                      if (ref->set == set
                          || alias_set_subset_of (set, ref->set))
                        ;
                      else if (alias_set_subset_of (ref->set, set))
                        {
                          ref->set = set;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (TREE_TYPE (ref2->op0),
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (TREE_TYPE (ref2->op2),
                                                  wi::to_wide (ref1->op2));
                        }
                      else
                        {
                          ref->set = 0;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op2));
                        }
                      operands.release ();

                      result = pre_expr_pool.allocate ();
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }
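
                  /* As a (hypothetical) example of the alias-set
                     adjustment above: if the hashtable reference and
                     this lookup carry incomparable alias sets, the
                     entry falls back to alias set 0, which conflicts
                     with everything, so an inserted expression never
                     claims TBAA guarantees that only one of the two
                     loads had.  */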

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}

/* Cheap DCE of a known set of possibly dead stmts.

   Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
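
/* E.g. (hypothetical): if insert () creates pretmp_1 = a_2 + b_3 on an
   edge but elimination later gives every use of that value a cheaper
   leader, pretmp_1 ends up with zero uses and is removed here.  */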

static void
remove_dead_inserted_code (void)
{
  /* ???  Re-use inserted_exprs as worklist not only as initial set.
     This may end up removing non-inserted code as well.  If we
     keep inserted_exprs unchanged we could restrict new worklist
     elements to members of inserted_exprs.  */
  bitmap worklist = inserted_exprs;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing unnecessary insertion:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();

  run_scc_vn (VN_WALK);

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not...)
     end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (inserted_exprs);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  clear_expression_ids ();

  scev_finalize ();
  remove_dead_inserted_code ();
  fini_pre ();
  loop_optimizer_finalize ();

  /* Restore SSA info before tail-merging as that resets it as well.  */
  scc_vn_restore_ssa_info ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}