/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN, which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	does expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).  */

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining quadraticness in
      memory of GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
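
/* As an illustration only (this function is not part of GCC and the
   names are made up): a source-level example of the redundancy the
   steps above remove.  The second computation of x + y is partially
   redundant; insertion adds x + y on the path where it is missing,
   and elimination then replaces the redundant computation with the
   now fully available value.  */

static int ATTRIBUTE_UNUSED
pre_example_sketch (int x, int y, int cond)
{
  int a = 0, b;
  if (cond)
    a = x + y;	/* x + y becomes available on this path only.  */
  b = x + y;	/* Partially redundant: ANTIC here, but AVAIL in only
		   one predecessor, so PRE inserts x + y into the other
		   predecessor and then eliminates this computation.  */
  return a + b;
}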
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insertion,
   driven by insert/insert_aux.  */
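
/* As an illustration only (this function is not part of GCC and the
   names are made up): in the diamond below, x + y is in ANTIC_IN of
   the block ending in the branch, is not in its AVAIL_OUT, and is
   available out of both successors, so it can be hoisted into the
   branch head, leaving a single computation.  */

static int ATTRIBUTE_UNUSED
hoist_example_sketch (int x, int y, int cond)
{
  int r;
  if (cond)
    r = (x + y) * 2;	/* x + y is computed on every path from the
			   branch head to the exit ...  */
  else
    r = (x + y) + 3;	/* ... so hoisting one computation of x + y
			   above the branch makes both of these copies
			   fully redundant.  */
  return r;
}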
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   SSA_NAME's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
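
/* As an illustration only (this function is not part of GCC and the
   names are made up): two syntactically different expressions that
   value numbering assigns the same value number.  PRE tracks the
   shared value with one small value_id instead of burning an
   SSA_NAME_VERSION per value.  */

static int ATTRIBUTE_UNUSED
value_id_example_sketch (int x)
{
  int a = x + 1;
  int b = 1 + x;	/* Commutated form: the same value as A, so
			   both expressions map to one value_id.  */
  return a * b;
}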
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
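
/* As an illustration only (hypothetical helper, not part of GCC):
   with the pair of bitmaps, asking "does the set contain some
   computation of this value?" is a single bit test on the value
   bitmap, independent of which of the possibly many expressions
   carrying that value is actually the member.  */

static bool ATTRIBUTE_UNUSED
two_bitmap_membership_sketch (bitmap value_bits, unsigned int value_id)
{
  /* The expression bitmap would be consulted separately, and only
     when an actual leader expression is needed.  */
  return bitmap_bit_p (value_bits, value_id);
}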
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
/* Return the expression id of EXPR, or 0 if we cannot find one.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_expr_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  bitmap_clear_bit (&set->values, val);
  bitmap_clear_bit (&set->expressions, get_expression_id (expr));
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
	 for the same value to appear in a set.  This is needed for
	 TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  pre_expr to_remove = NULL;
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove)
	{
	  bitmap_remove_expr_from_set (a, to_remove);
	  to_remove = NULL;
	}
      pre_expr expr = expression_for_id (i);
      if (bitmap_bit_p (&b->values, get_expr_value_id (expr)))
	to_remove = expr;
    }
  if (to_remove)
    bitmap_remove_expr_from_set (a, to_remove);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      return;
	    }
	}
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

/* Like print_bitmap_set but always prints to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

/* Print all of BB's dataflow sets to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

/* Like print_value_expressions but always prints to stderr.  */
DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
		     bitmap_set_t set3 = NULL)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return VN_INFO (PRE_EXPR_NAME (rep))->valnum;
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  newnary->op[i] = get_representative_for (result);
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       to be inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (constant);
			constant = find_leader_in_sets (value_id, set1, set2,
							AVAIL_OUT (pred));
			if (constant)
			  return constant;
		      }
		  }
		else
		  return constant;
	      }

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
		/* When we end up re-using a value number make sure that
		   doesn't have unrelated (which we can't check here)
		   range or points-to info on it.  */
		if (result
		    && INTEGRAL_TYPE_P (TREE_TYPE (result))
		    && SSA_NAME_RANGE_INFO (result)
		    && ! SSA_NAME_IS_DEFAULT_DEF (result))
		  {
		    if (! VN_INFO (result)->info.range_info)
		      {
			VN_INFO (result)->info.range_info
			  = SSA_NAME_RANGE_INFO (result);
			VN_INFO (result)->range_info_anti_range_p
			  = SSA_NAME_ANTI_RANGE_P (result);
		      }
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "clearing range info of ");
			print_generic_expr (dump_file, result);
			fprintf (dump_file, "\n");
		      }
		    SSA_NAME_RANGE_INFO (result) = NULL;
		  }
		else if (result
			 && POINTER_TYPE_P (TREE_TYPE (result))
			 && SSA_NAME_PTR_INFO (result)
			 && ! SSA_NAME_IS_DEFAULT_DEF (result))
		  {
		    if (! VN_INFO (result)->info.ptr_info)
		      VN_INFO (result)->info.ptr_info
			= SSA_NAME_PTR_INFO (result);
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "clearing points-to info of ");
			print_generic_expr (dump_file, result);
			fprintf (dump_file, "\n");
		      }
		    SSA_NAME_PTR_INFO (result) = NULL;
		  }
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  break;
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}


/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}


/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
1950 /* Remove from SET1 those expressions that are no longer valid in SET1 U SET2,
1951 i.e. expressions that are made up of values we have no leaders for
1952 in SET1 or SET2. */
1954 static void
1955 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
1957 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1958 pre_expr expr;
1959 int i;
1961 FOR_EACH_VEC_ELT (exprs, i, expr)
1963 if (!valid_in_sets (set1, set2, expr))
1964 bitmap_remove_expr_from_set (set1, expr);
1966 exprs.release ();
1969 /* Clean the set of expressions that are no longer valid in SET because
1970 they are clobbered in BLOCK or because they trap and may not be executed. */
1972 static void
1973 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1975 bitmap_iterator bi;
1976 unsigned i;
1977 pre_expr to_remove = NULL;
1979 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1981 /* Remove queued expr. */
1982 if (to_remove)
1984 bitmap_remove_expr_from_set (set, to_remove);
1985 to_remove = NULL;
1988 pre_expr expr = expression_for_id (i);
1989 if (expr->kind == REFERENCE)
1991 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1992 if (ref->vuse)
1994 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1995 if (!gimple_nop_p (def_stmt)
1996 && ((gimple_bb (def_stmt) != block
1997 && !dominated_by_p (CDI_DOMINATORS,
1998 block, gimple_bb (def_stmt)))
1999 || (gimple_bb (def_stmt) == block
2000 && value_dies_in_block_x (expr, block))))
2001 to_remove = expr;
2004 else if (expr->kind == NARY)
2006 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2007 /* If the NARY may trap, make sure the block does not contain
2008 a possible exit point.
2009 ??? This is overly conservative if we translate AVAIL_OUT
2010 as the available expression might be after the exit point. */
2011 if (BB_MAY_NOTRETURN (block)
2012 && vn_nary_may_trap (nary))
2013 to_remove = expr;
2017 /* Remove queued expr. */
2018 if (to_remove)
2019 bitmap_remove_expr_from_set (set, to_remove);
2022 static sbitmap has_abnormal_preds;
2024 /* Compute the ANTIC set for BLOCK.
2026 If succs(BLOCK) > 1 then
2027 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2028 else if succs(BLOCK) == 1 then
2029 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2031 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
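/* Worked example (not from the sources): in the diamond

	  B1
	 /  \
	B2    B3
	 \   /
	  B4

   with a + b in EXP_GEN of both B2 and B3, the single-successor rule
   propagates ANTIC_IN[B4] into ANTIC_OUT[B2] and ANTIC_OUT[B3], and
   ANTIC_OUT[B1] is the intersection of ANTIC_IN[B2] and ANTIC_IN[B3],
   so a + b is anticipated at B1 only because both arms compute it.  */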
2034 static bool
2035 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2037 bitmap_set_t S, old, ANTIC_OUT;
2038 bitmap_iterator bi;
2039 unsigned int bii;
2040 edge e;
2041 edge_iterator ei;
2043 bool changed = ! BB_VISITED (block);
2044 BB_VISITED (block) = 1;
2045 old = ANTIC_OUT = S = NULL;
2047 /* If any edges from predecessors are abnormal, antic_in is empty,
2048 so do nothing. */
2049 if (block_has_abnormal_pred_edge)
2050 goto maybe_dump_sets;
2052 old = ANTIC_IN (block);
2053 ANTIC_OUT = bitmap_set_new ();
2055 /* If the block has no successors, ANTIC_OUT is empty. */
2056 if (EDGE_COUNT (block->succs) == 0)
2058 /* If we have one successor, we could have some phi nodes to
2059 translate through. */
2060 else if (single_succ_p (block))
2062 basic_block succ_bb = single_succ (block);
2063 gcc_assert (BB_VISITED (succ_bb));
2064 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2066 /* If we have multiple successors, we take the intersection of all of
2067 them. Note that in the case of loop exit phi nodes, we may have
2068 phis to translate through. */
2069 else
2071 size_t i;
2072 basic_block bprime, first = NULL;
2074 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2075 FOR_EACH_EDGE (e, ei, block->succs)
2077 if (!first
2078 && BB_VISITED (e->dest))
2079 first = e->dest;
2080 else if (BB_VISITED (e->dest))
2081 worklist.quick_push (e->dest);
2082 else
2084 /* Unvisited successors get their ANTIC_IN replaced by the
2085 maximal set to arrive at a maximum ANTIC_IN solution.
2086 We can ignore them in the intersection operation and thus
2087 need not explicitly represent that maximum solution. */
2088 if (dump_file && (dump_flags & TDF_DETAILS))
2089 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2090 e->src->index, e->dest->index);
2094 /* Of the multiple successors we must have visited at least one
2095 already, which is guaranteed by the iteration order. */
2096 gcc_assert (first != NULL);
2098 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2100 /* If we have multiple successors we need to intersect the ANTIC_OUT
2101 sets. For values that's a simple intersection but for
2102 expressions it is a union. Given we want to have a single
2103 expression per value in our sets we have to canonicalize.
2104 Avoid randomness and running into cycles like for PR82129 and
2105 canonicalize the expression we choose to the one with the
2106 lowest id. This requires we actually compute the union first. */
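/* Illustration with hypothetical ids: if value V5 is represented by
   expression e7 in one successor's ANTIC_IN and by e3 in another's,
   the value intersection keeps V5 while the expression union keeps
   both e3 and e7; the pruning loop below then clears e7, so V5 ends
   up represented only by the lowest-id expression e3.  */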
2107 FOR_EACH_VEC_ELT (worklist, i, bprime)
2109 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2111 bitmap_set_t tmp = bitmap_set_new ();
2112 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2113 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2114 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2115 bitmap_set_free (tmp);
2117 else
2119 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (bprime)->values);
2120 bitmap_ior_into (&ANTIC_OUT->expressions,
2121 &ANTIC_IN (bprime)->expressions);
2124 if (! worklist.is_empty ())
2126 /* Prune expressions not in the value set, canonicalizing to
2127 expression with lowest ID. */
2128 bitmap_iterator bi;
2129 unsigned int i;
2130 unsigned int to_clear = -1U;
2131 bitmap seen_value = BITMAP_ALLOC (NULL);
2132 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2134 if (to_clear != -1U)
2136 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2137 to_clear = -1U;
2139 pre_expr expr = expression_for_id (i);
2140 unsigned int value_id = get_expr_value_id (expr);
2141 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)
2142 || !bitmap_set_bit (seen_value, value_id))
2143 to_clear = i;
2145 if (to_clear != -1U)
2146 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2147 BITMAP_FREE (seen_value);
2151 /* Prune expressions that are clobbered in block and thus become
2152 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2153 prune_clobbered_mems (ANTIC_OUT, block);
2155 /* Generate ANTIC_OUT - TMP_GEN. */
2156 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2158 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2159 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2160 TMP_GEN (block));
2162 /* Then union in the ANTIC_OUT - TMP_GEN values,
2163 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2164 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2165 bitmap_value_insert_into_set (ANTIC_IN (block),
2166 expression_for_id (bii));
2168 clean (ANTIC_IN (block));
2170 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2171 changed = true;
2173 maybe_dump_sets:
2174 if (dump_file && (dump_flags & TDF_DETAILS))
2176 if (ANTIC_OUT)
2177 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2179 if (changed)
2180 fprintf (dump_file, "[changed] ");
2181 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2182 block->index);
2184 if (S)
2185 print_bitmap_set (dump_file, S, "S", block->index);
2187 if (old)
2188 bitmap_set_free (old);
2189 if (S)
2190 bitmap_set_free (S);
2191 if (ANTIC_OUT)
2192 bitmap_set_free (ANTIC_OUT);
2193 return changed;
2196 /* Compute PARTIAL_ANTIC for BLOCK.
2198 If succs(BLOCK) > 1 then
2199 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2200 in ANTIC_OUT for all succ(BLOCK)
2201 else if succs(BLOCK) == 1 then
2202 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2204 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
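/* Worked example (not from the sources): if a + b is in ANTIC_IN of
   successor S1 but not of successor S2, the intersection keeps it out
   of ANTIC_OUT[BLOCK], yet the union above still puts it into
   PA_OUT[BLOCK] -- it is only partially anticipated, which is exactly
   what the partial-partial insertion phase wants to know about.  */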
2207 static void
2208 compute_partial_antic_aux (basic_block block,
2209 bool block_has_abnormal_pred_edge)
2211 bitmap_set_t old_PA_IN;
2212 bitmap_set_t PA_OUT;
2213 edge e;
2214 edge_iterator ei;
2215 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2217 old_PA_IN = PA_OUT = NULL;
2219 /* If any edges from predecessors are abnormal, antic_in is empty,
2220 so do nothing. */
2221 if (block_has_abnormal_pred_edge)
2222 goto maybe_dump_sets;
2224 /* If there are too many partially anticipatable values in the
2225 block, phi_translate_set can take an exponential time: stop
2226 before the translation starts. */
2227 if (max_pa
2228 && single_succ_p (block)
2229 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2230 goto maybe_dump_sets;
2232 old_PA_IN = PA_IN (block);
2233 PA_OUT = bitmap_set_new ();
2235 /* If the block has no successors, PA_OUT is empty. */
2236 if (EDGE_COUNT (block->succs) == 0)
2238 /* If we have one successor, we could have some phi nodes to
2239 translate through. Note that we can't phi translate across DFS
2240 back edges in partial antic, because it uses a union operation on
2241 the successors. For recurrences like IVs, we would end up
2242 generating a new value in the set on each go around (i + 3 (VH.1),
2243 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2244 else if (single_succ_p (block))
2246 basic_block succ = single_succ (block);
2247 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2248 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2250 /* If we have multiple successors, we take the union of all of
2251 them. */
2252 else
2254 size_t i;
2255 basic_block bprime;
2257 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2258 FOR_EACH_EDGE (e, ei, block->succs)
2260 if (e->flags & EDGE_DFS_BACK)
2261 continue;
2262 worklist.quick_push (e->dest);
2264 if (worklist.length () > 0)
2266 FOR_EACH_VEC_ELT (worklist, i, bprime)
2268 unsigned int i;
2269 bitmap_iterator bi;
2271 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2272 bitmap_value_insert_into_set (PA_OUT,
2273 expression_for_id (i));
2274 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2276 bitmap_set_t pa_in = bitmap_set_new ();
2277 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2278 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2279 bitmap_value_insert_into_set (PA_OUT,
2280 expression_for_id (i));
2281 bitmap_set_free (pa_in);
2283 else
2284 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2285 bitmap_value_insert_into_set (PA_OUT,
2286 expression_for_id (i));
2291 /* Prune expressions that are clobbered in block and thus become
2292 invalid if translated from PA_OUT to PA_IN. */
2293 prune_clobbered_mems (PA_OUT, block);
2295 /* PA_IN starts with PA_OUT - TMP_GEN.
2296 Then we subtract things from ANTIC_IN. */
2297 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2299 /* For partial antic, we want to put back in the phi results, since
2300 we will properly avoid making them partially antic over backedges. */
2301 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2302 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2304 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2305 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2307 clean (PA_IN (block), ANTIC_IN (block));
2309 maybe_dump_sets:
2310 if (dump_file && (dump_flags & TDF_DETAILS))
2312 if (PA_OUT)
2313 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2315 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2317 if (old_PA_IN)
2318 bitmap_set_free (old_PA_IN);
2319 if (PA_OUT)
2320 bitmap_set_free (PA_OUT);
2323 /* Compute ANTIC and partial ANTIC sets. */
2325 static void
2326 compute_antic (void)
2328 bool changed = true;
2329 int num_iterations = 0;
2330 basic_block block;
2331 int i;
2332 edge_iterator ei;
2333 edge e;
2335 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2336 We pre-build the map of blocks with incoming abnormal edges here. */
2337 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2338 bitmap_clear (has_abnormal_preds);
2340 FOR_ALL_BB_FN (block, cfun)
2342 BB_VISITED (block) = 0;
2344 FOR_EACH_EDGE (e, ei, block->preds)
2345 if (e->flags & EDGE_ABNORMAL)
2347 bitmap_set_bit (has_abnormal_preds, block->index);
2348 break;
2351 /* While we are here, give empty ANTIC_IN sets to each block. */
2352 ANTIC_IN (block) = bitmap_set_new ();
2353 if (do_partial_partial)
2354 PA_IN (block) = bitmap_set_new ();
2357 /* At the exit block we anticipate nothing. */
2358 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2360 /* For ANTIC computation we need a postorder that also guarantees that
2361 a block with a single successor is visited after its successor.
2362 RPO on the inverted CFG has this property. */
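/* Illustration (not from the sources): for the chain B1 -> B2 -> EXIT,
   RPO on the inverted CFG orders EXIT before B2 before B1, so by the
   time B1 is processed, ANTIC_IN[B2] -- which B1's single-successor
   rule reads -- has already been computed.  */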
2363 auto_vec<int, 20> postorder;
2364 inverted_post_order_compute (&postorder);
2366 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2367 bitmap_clear (worklist);
2368 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2369 bitmap_set_bit (worklist, e->src->index);
2370 while (changed)
2372 if (dump_file && (dump_flags & TDF_DETAILS))
2373 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2374 /* ??? We need to clear our PHI translation cache here as the
2375 ANTIC sets shrink and we restrict valid translations to
2376 those having operands with leaders in ANTIC. Same below
2377 for PA ANTIC computation. */
2378 num_iterations++;
2379 changed = false;
2380 for (i = postorder.length () - 1; i >= 0; i--)
2382 if (bitmap_bit_p (worklist, postorder[i]))
2384 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2385 bitmap_clear_bit (worklist, block->index);
2386 if (compute_antic_aux (block,
2387 bitmap_bit_p (has_abnormal_preds,
2388 block->index)))
2390 FOR_EACH_EDGE (e, ei, block->preds)
2391 bitmap_set_bit (worklist, e->src->index);
2392 changed = true;
2396 /* Theoretically possible, but *highly* unlikely. */
2397 gcc_checking_assert (num_iterations < 500);
2400 statistics_histogram_event (cfun, "compute_antic iterations",
2401 num_iterations);
2403 if (do_partial_partial)
2405 /* For partial antic we ignore backedges and thus we do not need
2406 to perform any iteration when we process blocks in postorder. */
2407 int postorder_num
2408 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2409 for (i = postorder_num - 1 ; i >= 0; i--)
2411 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2412 compute_partial_antic_aux (block,
2413 bitmap_bit_p (has_abnormal_preds,
2414 block->index));
2418 sbitmap_free (has_abnormal_preds);
2422 /* Inserted expressions are placed onto this worklist, which is used
2423 for performing quick dead code elimination of insertions we made
2424 that didn't turn out to be necessary. */
2425 static bitmap inserted_exprs;
2427 /* The actual worker for create_component_ref_by_pieces. */
2429 static tree
2430 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2431 unsigned int *operand, gimple_seq *stmts)
2433 vn_reference_op_t currop = &ref->operands[*operand];
2434 tree genop;
2435 ++*operand;
2436 switch (currop->opcode)
2438 case CALL_EXPR:
2439 gcc_unreachable ();
2441 case MEM_REF:
2443 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2444 stmts);
2445 if (!baseop)
2446 return NULL_TREE;
2447 tree offset = currop->op0;
2448 if (TREE_CODE (baseop) == ADDR_EXPR
2449 && handled_component_p (TREE_OPERAND (baseop, 0)))
2451 HOST_WIDE_INT off;
2452 tree base;
2453 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2454 &off);
2455 gcc_assert (base);
2456 offset = int_const_binop (PLUS_EXPR, offset,
2457 build_int_cst (TREE_TYPE (offset),
2458 off));
2459 baseop = build_fold_addr_expr (base);
2461 genop = build2 (MEM_REF, currop->type, baseop, offset);
2462 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2463 MR_DEPENDENCE_BASE (genop) = currop->base;
2464 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2465 return genop;
2468 case TARGET_MEM_REF:
2470 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2471 vn_reference_op_t nextop = &ref->operands[++*operand];
2472 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2473 stmts);
2474 if (!baseop)
2475 return NULL_TREE;
2476 if (currop->op0)
2478 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2479 if (!genop0)
2480 return NULL_TREE;
2482 if (nextop->op0)
2484 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2485 if (!genop1)
2486 return NULL_TREE;
2488 genop = build5 (TARGET_MEM_REF, currop->type,
2489 baseop, currop->op2, genop0, currop->op1, genop1);
2491 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2492 MR_DEPENDENCE_BASE (genop) = currop->base;
2493 return genop;
2496 case ADDR_EXPR:
2497 if (currop->op0)
2499 gcc_assert (is_gimple_min_invariant (currop->op0));
2500 return currop->op0;
2502 /* Fallthrough. */
2503 case REALPART_EXPR:
2504 case IMAGPART_EXPR:
2505 case VIEW_CONVERT_EXPR:
2507 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2508 stmts);
2509 if (!genop0)
2510 return NULL_TREE;
2511 return fold_build1 (currop->opcode, currop->type, genop0);
2514 case WITH_SIZE_EXPR:
2516 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2517 stmts);
2518 if (!genop0)
2519 return NULL_TREE;
2520 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2521 if (!genop1)
2522 return NULL_TREE;
2523 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2526 case BIT_FIELD_REF:
2528 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2529 stmts);
2530 if (!genop0)
2531 return NULL_TREE;
2532 tree op1 = currop->op0;
2533 tree op2 = currop->op1;
2534 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2535 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2536 return fold (t);
2539 /* For array ref vn_reference_op's, operand 1 of the array ref
2540 is op0 of the reference op and operand 3 of the array ref is
2541 op1. */
2542 case ARRAY_RANGE_REF:
2543 case ARRAY_REF:
2545 tree genop0;
2546 tree genop1 = currop->op0;
2547 tree genop2 = currop->op1;
2548 tree genop3 = currop->op2;
2549 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2550 stmts);
2551 if (!genop0)
2552 return NULL_TREE;
2553 genop1 = find_or_generate_expression (block, genop1, stmts);
2554 if (!genop1)
2555 return NULL_TREE;
2556 if (genop2)
2558 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2559 /* Drop zero minimum index if redundant. */
2560 if (integer_zerop (genop2)
2561 && (!domain_type
2562 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2563 genop2 = NULL_TREE;
2564 else
2566 genop2 = find_or_generate_expression (block, genop2, stmts);
2567 if (!genop2)
2568 return NULL_TREE;
2571 if (genop3)
2573 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2574 /* We can't always put a size in units of the element alignment
2575 here as the element alignment may not be visible. See
2576 PR43783. Simply drop the element size for constant
2577 sizes. */
2578 if (TREE_CODE (genop3) == INTEGER_CST
2579 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2580 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2581 (wi::to_offset (genop3)
2582 * vn_ref_op_align_unit (currop))))
2583 genop3 = NULL_TREE;
2584 else
2586 genop3 = find_or_generate_expression (block, genop3, stmts);
2587 if (!genop3)
2588 return NULL_TREE;
2591 return build4 (currop->opcode, currop->type, genop0, genop1,
2592 genop2, genop3);
2594 case COMPONENT_REF:
2596 tree op0;
2597 tree op1;
2598 tree genop2 = currop->op1;
2599 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2600 if (!op0)
2601 return NULL_TREE;
2602 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */
2603 op1 = currop->op0;
2604 if (genop2)
2606 genop2 = find_or_generate_expression (block, genop2, stmts);
2607 if (!genop2)
2608 return NULL_TREE;
2610 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2613 case SSA_NAME:
2615 genop = find_or_generate_expression (block, currop->op0, stmts);
2616 return genop;
2618 case STRING_CST:
2619 case INTEGER_CST:
2620 case COMPLEX_CST:
2621 case VECTOR_CST:
2622 case REAL_CST:
2623 case CONSTRUCTOR:
2624 case VAR_DECL:
2625 case PARM_DECL:
2626 case CONST_DECL:
2627 case RESULT_DECL:
2628 case FUNCTION_DECL:
2629 return currop->op0;
2631 default:
2632 gcc_unreachable ();
2636 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2637 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2638 trying to rename aggregates into SSA form directly, which is a no-no.
2640 Thus, this routine doesn't create temporaries; it just builds a
2641 single access expression for the array, calling
2642 find_or_generate_expression to build the innermost pieces.
2644 This function is a subroutine of create_expression_by_pieces, and
2645 should not be called on its own unless you really know what you
2646 are doing. */
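/* Illustration (hypothetical reference, sketched from memory): for an
   access like a[i_1].f the operands vector stores the components
   roughly outermost-first,

     operands[0]  COMPONENT_REF  (.f)
     operands[1]  ARRAY_REF      ([i_1])
     operands[2]  VAR_DECL       (a)

   and create_component_ref_by_pieces_1 recurses through *OPERAND so
   the inner pieces are rebuilt first, with only scalar operands such
   as i_1 going through find_or_generate_expression.  */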
2648 static tree
2649 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2650 gimple_seq *stmts)
2652 unsigned int op = 0;
2653 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2656 /* Find a simple leader for an expression, or generate one using
2657 create_expression_by_pieces from a NARY expression for the value.
2658 BLOCK is the basic_block we are looking for leaders in.
2659 OP is the tree expression to find a leader for or generate.
2660 Returns the leader or NULL_TREE on failure. */
2662 static tree
2663 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2665 pre_expr expr = get_or_alloc_expr_for (op);
2666 unsigned int lookfor = get_expr_value_id (expr);
2667 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2668 if (leader)
2670 if (leader->kind == NAME)
2671 return PRE_EXPR_NAME (leader);
2672 else if (leader->kind == CONSTANT)
2673 return PRE_EXPR_CONSTANT (leader);
2675 /* Defer. */
2676 return NULL_TREE;
2679 /* It must be a complex expression, so generate it recursively. Note
2680 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2681 where the insert algorithm fails to insert a required expression. */
2682 bitmap exprset = value_expressions[lookfor];
2683 bitmap_iterator bi;
2684 unsigned int i;
2685 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2687 pre_expr temp = expression_for_id (i);
2688 /* We cannot insert random REFERENCE expressions at arbitrary
2689 places. We can insert NARYs, which eventually re-materialize
2690 their operand values. */
2691 if (temp->kind == NARY)
2692 return create_expression_by_pieces (block, temp, stmts,
2693 get_expr_type (expr));
2696 /* Defer. */
2697 return NULL_TREE;
2700 /* Create an expression in pieces, so that we can handle very complex
2701 expressions that may be ANTIC, but not necessarily GIMPLE.
2702 BLOCK is the basic block the expression will be inserted into,
2703 EXPR is the expression to insert (in value form), and
2704 STMTS is a statement list to append the necessary insertions into.
2706 This function will die if we hit some value that shouldn't be
2707 ANTIC but is (i.e. there is no leader for it, or for its components).
2708 The function returns NULL_TREE in case a different antic expression
2709 has to be inserted first.
2710 This function may also generate expressions that are themselves
2711 partially or fully redundant. Those that are will be either made
2712 fully redundant during the next iteration of insert (for partially
2713 redundant ones), or eliminated by eliminate (for fully redundant
2714 ones). */
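/* Illustration (hypothetical SSA names): asked to materialize the
   value-form NARY {PLUS_EXPR, x_1, y_2} in BLOCK, the code below finds
   (or recursively generates) leaders for both operands and appends

     pretmp_5 = x_1 + y_2;

   to STMTS, then gives pretmp_5 the value number of the expression so
   that later occurrences of the value become fully redundant.  */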
2716 static tree
2717 create_expression_by_pieces (basic_block block, pre_expr expr,
2718 gimple_seq *stmts, tree type)
2720 tree name;
2721 tree folded;
2722 gimple_seq forced_stmts = NULL;
2723 unsigned int value_id;
2724 gimple_stmt_iterator gsi;
2725 tree exprtype = type ? type : get_expr_type (expr);
2726 pre_expr nameexpr;
2727 gassign *newstmt;
2729 switch (expr->kind)
2731 /* We may hit the NAME/CONSTANT case if we have to convert types
2732 that value numbering saw through. */
2733 case NAME:
2734 folded = PRE_EXPR_NAME (expr);
2735 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2736 return folded;
2737 break;
2738 case CONSTANT:
2740 folded = PRE_EXPR_CONSTANT (expr);
2741 tree tem = fold_convert (exprtype, folded);
2742 if (is_gimple_min_invariant (tem))
2743 return tem;
2744 break;
2746 case REFERENCE:
2747 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2749 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2750 unsigned int operand = 1;
2751 vn_reference_op_t currop = &ref->operands[0];
2752 tree sc = NULL_TREE;
2753 tree fn;
2754 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2755 fn = currop->op0;
2756 else
2757 fn = find_or_generate_expression (block, currop->op0, stmts);
2758 if (!fn)
2759 return NULL_TREE;
2760 if (currop->op1)
2762 sc = find_or_generate_expression (block, currop->op1, stmts);
2763 if (!sc)
2764 return NULL_TREE;
2766 auto_vec<tree> args (ref->operands.length () - 1);
2767 while (operand < ref->operands.length ())
2769 tree arg = create_component_ref_by_pieces_1 (block, ref,
2770 &operand, stmts);
2771 if (!arg)
2772 return NULL_TREE;
2773 args.quick_push (arg);
2775 gcall *call
2776 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2777 ? build_fold_addr_expr (fn) : fn), args);
2778 gimple_call_set_with_bounds (call, currop->with_bounds);
2779 if (sc)
2780 gimple_call_set_chain (call, sc);
2781 tree forcedname = make_ssa_name (currop->type);
2782 gimple_call_set_lhs (call, forcedname);
2783 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2784 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2785 folded = forcedname;
2787 else
2789 folded = create_component_ref_by_pieces (block,
2790 PRE_EXPR_REFERENCE (expr),
2791 stmts);
2792 if (!folded)
2793 return NULL_TREE;
2794 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2795 newstmt = gimple_build_assign (name, folded);
2796 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2797 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2798 folded = name;
2800 break;
2801 case NARY:
2803 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2804 tree *genop = XALLOCAVEC (tree, nary->length);
2805 unsigned i;
2806 for (i = 0; i < nary->length; ++i)
2808 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2809 if (!genop[i])
2810 return NULL_TREE;
2811 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2812 may have conversions stripped. */
2813 if (nary->opcode == POINTER_PLUS_EXPR)
2815 if (i == 0)
2816 genop[i] = gimple_convert (&forced_stmts,
2817 nary->type, genop[i]);
2818 else if (i == 1)
2819 genop[i] = gimple_convert (&forced_stmts,
2820 sizetype, genop[i]);
2822 else
2823 genop[i] = gimple_convert (&forced_stmts,
2824 TREE_TYPE (nary->op[i]), genop[i]);
2826 if (nary->opcode == CONSTRUCTOR)
2828 vec<constructor_elt, va_gc> *elts = NULL;
2829 for (i = 0; i < nary->length; ++i)
2830 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2831 folded = build_constructor (nary->type, elts);
2832 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2833 newstmt = gimple_build_assign (name, folded);
2834 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2835 folded = name;
2837 else
2839 switch (nary->length)
2841 case 1:
2842 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2843 genop[0]);
2844 break;
2845 case 2:
2846 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2847 genop[0], genop[1]);
2848 break;
2849 case 3:
2850 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2851 genop[0], genop[1], genop[2]);
2852 break;
2853 default:
2854 gcc_unreachable ();
2858 break;
2859 default:
2860 gcc_unreachable ();
2863 folded = gimple_convert (&forced_stmts, exprtype, folded);
2865 /* If there is nothing to insert, return the simplified result. */
2866 if (gimple_seq_empty_p (forced_stmts))
2867 return folded;
2868 /* If we simplified to a constant, return it and discard any
2869 stmts we may have built. */
2870 if (is_gimple_min_invariant (folded))
2872 gimple_seq_discard (forced_stmts);
2873 return folded;
2875 /* Likewise if we simplified to something not queued for insertion. */
2876 bool found = false;
2877 gsi = gsi_last (forced_stmts);
2878 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2880 gimple *stmt = gsi_stmt (gsi);
2881 tree forcedname = gimple_get_lhs (stmt);
2882 if (forcedname == folded)
2884 found = true;
2885 break;
2888 if (! found)
2890 gimple_seq_discard (forced_stmts);
2891 return folded;
2893 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2895 /* If we have any intermediate expressions, add them to the value
2896 sets and chain them into the instruction stream. */
2897 if (forced_stmts)
2899 gsi = gsi_start (forced_stmts);
2900 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2902 gimple *stmt = gsi_stmt (gsi);
2903 tree forcedname = gimple_get_lhs (stmt);
2904 pre_expr nameexpr;
2906 if (forcedname != folded)
2908 VN_INFO_GET (forcedname)->valnum = forcedname;
2909 VN_INFO (forcedname)->value_id = get_next_value_id ();
2910 nameexpr = get_or_alloc_expr_for_name (forcedname);
2911 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2912 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2913 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2916 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2918 gimple_seq_add_seq (stmts, forced_stmts);
2921 name = folded;
2923 /* Fold the last statement. */
2924 gsi = gsi_last (*stmts);
2925 if (fold_stmt_inplace (&gsi))
2926 update_stmt (gsi_stmt (gsi));
2928 /* Add a value number to the temporary.
2929 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2930 we are creating the expression by pieces, and this particular piece of
2931 the expression may already have been represented. There is no harm in
2932 replacing it here. */
2933 value_id = get_expr_value_id (expr);
2934 VN_INFO_GET (name)->value_id = value_id;
2935 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2936 if (VN_INFO (name)->valnum == NULL_TREE)
2937 VN_INFO (name)->valnum = name;
2938 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2939 nameexpr = get_or_alloc_expr_for_name (name);
2940 add_to_value (value_id, nameexpr);
2941 if (NEW_SETS (block))
2942 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2943 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2945 pre_stats.insertions++;
2946 if (dump_file && (dump_flags & TDF_DETAILS))
2948 fprintf (dump_file, "Inserted ");
2949 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2950 fprintf (dump_file, " in predecessor %d (%04d)\n",
2951 block->index, value_id);
2954 return name;
2958 /* Insert the to-be-made-available values of expression EXPRNUM for each
2959 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2960 merge the result with a phi node, giving it the same value number as
2961 the expression. Return true if we have inserted new stuff. */
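/* Illustration (hypothetical names): if AVAIL maps predecessor P1 to
   an existing leader t_3 and the insertion on the P2 edge produced
   pretmp_7, the merge built below is

     prephitmp_8 = PHI <t_3 (P1), pretmp_7 (P2)>

   and prephitmp_8 is registered with the same value-id so elimination
   can later use it.  */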
2963 static bool
2964 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2965 vec<pre_expr> avail)
2967 pre_expr expr = expression_for_id (exprnum);
2968 pre_expr newphi;
2969 unsigned int val = get_expr_value_id (expr);
2970 edge pred;
2971 bool insertions = false;
2972 bool nophi = false;
2973 basic_block bprime;
2974 pre_expr eprime;
2975 edge_iterator ei;
2976 tree type = get_expr_type (expr);
2977 tree temp;
2978 gphi *phi;
2980 /* Make sure we aren't creating an induction variable. */
2981 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2983 bool firstinsideloop = false;
2984 bool secondinsideloop = false;
2985 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2986 EDGE_PRED (block, 0)->src);
2987 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2988 EDGE_PRED (block, 1)->src);
2989 /* Induction variables only have one edge inside the loop. */
2990 if ((firstinsideloop ^ secondinsideloop)
2991 && expr->kind != REFERENCE)
2993 if (dump_file && (dump_flags & TDF_DETAILS))
2994 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2995 nophi = true;
2999 /* Make the necessary insertions. */
3000 FOR_EACH_EDGE (pred, ei, block->preds)
3002 gimple_seq stmts = NULL;
3003 tree builtexpr;
3004 bprime = pred->src;
3005 eprime = avail[pred->dest_idx];
3006 builtexpr = create_expression_by_pieces (bprime, eprime,
3007 &stmts, type);
3008 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3009 if (!gimple_seq_empty_p (stmts))
3011 gsi_insert_seq_on_edge (pred, stmts);
3012 insertions = true;
3014 if (!builtexpr)
3016 /* We cannot insert a PHI node if we failed to insert
3017 on one edge. */
3018 nophi = true;
3019 continue;
3021 if (is_gimple_min_invariant (builtexpr))
3022 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3023 else
3024 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3026 /* If we didn't want a phi node, and we made insertions, we still have
3027 inserted new stuff, and thus return true. If we didn't want a phi node,
3028 and didn't make insertions, we haven't added anything new, so return
3029 false. */
3030 if (nophi && insertions)
3031 return true;
3032 else if (nophi && !insertions)
3033 return false;
3035 /* Now build a phi for the new variable. */
3036 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3037 phi = create_phi_node (temp, block);
3039 VN_INFO_GET (temp)->value_id = val;
3040 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3041 if (VN_INFO (temp)->valnum == NULL_TREE)
3042 VN_INFO (temp)->valnum = temp;
3043 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3044 FOR_EACH_EDGE (pred, ei, block->preds)
3046 pre_expr ae = avail[pred->dest_idx];
3047 gcc_assert (get_expr_type (ae) == type
3048 || useless_type_conversion_p (type, get_expr_type (ae)));
3049 if (ae->kind == CONSTANT)
3050 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3051 pred, UNKNOWN_LOCATION);
3052 else
3053 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3056 newphi = get_or_alloc_expr_for_name (temp);
3057 add_to_value (val, newphi);
3059 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3060 this insertion, since we test for the existence of this value in PHI_GEN
3061 before proceeding with the partial redundancy checks in insert_aux.
3063 The value may exist in AVAIL_OUT, in particular, it could be represented
3064 by the expression we are trying to eliminate, in which case we want the
3065 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3066 inserted there.
3068 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3069 this block, because if it did, it would have existed in our dominator's
3070 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3073 bitmap_insert_into_set (PHI_GEN (block), newphi);
3074 bitmap_value_replace_in_set (AVAIL_OUT (block),
3075 newphi);
3076 bitmap_insert_into_set (NEW_SETS (block),
3077 newphi);
3079 /* If we insert a PHI node for a conversion of another PHI node
3080 in the same basic-block, try to preserve range information.
3081 This is important so that followup loop passes receive optimal
3082 number-of-iterations analysis results. See PR61743. */
3083 if (expr->kind == NARY
3084 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3085 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3086 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3087 && INTEGRAL_TYPE_P (type)
3088 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3089 && (TYPE_PRECISION (type)
3090 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3091 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3093 wide_int min, max;
3094 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3095 && !wi::neg_p (min, SIGNED)
3096 && !wi::neg_p (max, SIGNED))
3097 /* Just handle extension and sign-changes of all-positive ranges. */
3098 set_range_info (temp,
3099 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3100 wide_int_storage::from (min, TYPE_PRECISION (type),
3101 TYPE_SIGN (type)),
3102 wide_int_storage::from (max, TYPE_PRECISION (type),
3103 TYPE_SIGN (type)));
3106 if (dump_file && (dump_flags & TDF_DETAILS))
3108 fprintf (dump_file, "Created phi ");
3109 print_gimple_stmt (dump_file, phi, 0);
3110 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3112 pre_stats.phis++;
3113 return true;
3118 /* Perform insertion of partially redundant or hoistable values.
3119 For BLOCK, do the following:
3120 1. Propagate the NEW_SETS of the dominator into the current block.
3121 If the block has multiple predecessors,
3122 2a. Iterate over the ANTIC expressions for the block to see if
3123 any of them are partially redundant.
3124 2b. If so, insert them into the necessary predecessors to make
3125 the expression fully redundant.
3126 2c. Insert a new PHI merging the values of the predecessors.
3127 2d. Insert the new PHI, and the new expressions, into the
3128 NEW_SETS set.
3129 If the block has multiple successors,
3130 3a. Iterate over the ANTIC values for the block to see if
3131 any of them are good candidates for hoisting.
3132 3b. If so, insert expressions computing the values in BLOCK,
3133 and add the new expressions into the NEW_SETS set.
3134 4. Recursively call ourselves on the dominator children of BLOCK.
3136 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3137 do_pre_regular_insertion and do_partial_insertion. 3a and 3b are
3138 done in do_hoist_insertion.
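/* Worked example (not from the sources): with predecessors P1, which
   already computes t_3 = a + b, and P2, which does not, a use of
   a + b in BLOCK is partially redundant.  Step 2b inserts
   pretmp_7 = a + b on the P2 edge and step 2c merges the two with
   prephitmp_8 = PHI <t_3 (P1), pretmp_7 (P2)>, after which the
   original computation is fully redundant.  */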
3141 static bool
3142 do_pre_regular_insertion (basic_block block, basic_block dom)
3144 bool new_stuff = false;
3145 vec<pre_expr> exprs;
3146 pre_expr expr;
3147 auto_vec<pre_expr> avail;
3148 int i;
3150 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3151 avail.safe_grow (EDGE_COUNT (block->preds));
3153 FOR_EACH_VEC_ELT (exprs, i, expr)
3155 if (expr->kind == NARY
3156 || expr->kind == REFERENCE)
3158 unsigned int val;
3159 bool by_some = false;
3160 bool cant_insert = false;
3161 bool all_same = true;
3162 pre_expr first_s = NULL;
3163 edge pred;
3164 basic_block bprime;
3165 pre_expr eprime = NULL;
3166 edge_iterator ei;
3167 pre_expr edoubleprime = NULL;
3168 bool do_insertion = false;
3170 val = get_expr_value_id (expr);
3171 if (bitmap_set_contains_value (PHI_GEN (block), val))
3172 continue;
3173 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3175 if (dump_file && (dump_flags & TDF_DETAILS))
3177 fprintf (dump_file, "Found fully redundant value: ");
3178 print_pre_expr (dump_file, expr);
3179 fprintf (dump_file, "\n");
3181 continue;
3184 FOR_EACH_EDGE (pred, ei, block->preds)
3186 unsigned int vprime;
3188 /* We should never run insertion for the exit block
3189 and so not come across fake pred edges. */
3190 gcc_assert (!(pred->flags & EDGE_FAKE));
3191 bprime = pred->src;
3192 /* We are looking at ANTIC_OUT of bprime. */
3193 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3194 bprime, block);
3196 /* eprime will generally only be NULL if the
3197 value of the expression, translated
3198 through the PHI for this predecessor, is
3199 undefined. If that is the case, we can't
3200 make the expression fully redundant,
3201 because its value is undefined along a
3202 predecessor path. We can thus break out
3203 early because it doesn't matter what the
3204 rest of the results are. */
3205 if (eprime == NULL)
3207 avail[pred->dest_idx] = NULL;
3208 cant_insert = true;
3209 break;
3212 vprime = get_expr_value_id (eprime);
3213 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3214 vprime);
3215 if (edoubleprime == NULL)
3217 avail[pred->dest_idx] = eprime;
3218 all_same = false;
3220 else
3222 avail[pred->dest_idx] = edoubleprime;
3223 by_some = true;
3224 /* We want to perform insertions to remove a redundancy on
3225 a path in the CFG we want to optimize for speed. */
3226 if (optimize_edge_for_speed_p (pred))
3227 do_insertion = true;
3228 if (first_s == NULL)
3229 first_s = edoubleprime;
3230 else if (!pre_expr_d::equal (first_s, edoubleprime))
3231 all_same = false;
3234 /* If we can insert it, it's not the same value
3235 already existing along every predecessor, and
3236 it's defined by some predecessor, then it is
3237 partially redundant. */
3238 if (!cant_insert && !all_same && by_some)
3240 if (!do_insertion)
3242 if (dump_file && (dump_flags & TDF_DETAILS))
3244 fprintf (dump_file, "Skipping partial redundancy for "
3245 "expression ");
3246 print_pre_expr (dump_file, expr);
3247 fprintf (dump_file, " (%04d), no redundancy on to be "
3248 "optimized for speed edge\n", val);
3251 else if (dbg_cnt (treepre_insert))
3253 if (dump_file && (dump_flags & TDF_DETAILS))
3255 fprintf (dump_file, "Found partial redundancy for "
3256 "expression ");
3257 print_pre_expr (dump_file, expr);
3258 fprintf (dump_file, " (%04d)\n",
3259 get_expr_value_id (expr));
3261 if (insert_into_preds_of_block (block,
3262 get_expression_id (expr),
3263 avail))
3264 new_stuff = true;
3267 /* If all edges produce the same value and that value is
3268 an invariant, then the PHI has the same value on all
3269 edges. Note this. */
3270 else if (!cant_insert && all_same)
3272 gcc_assert (edoubleprime->kind == CONSTANT
3273 || edoubleprime->kind == NAME);
3275 tree temp = make_temp_ssa_name (get_expr_type (expr),
3276 NULL, "pretmp");
3277 gassign *assign
3278 = gimple_build_assign (temp,
3279 edoubleprime->kind == CONSTANT ?
3280 PRE_EXPR_CONSTANT (edoubleprime) :
3281 PRE_EXPR_NAME (edoubleprime));
3282 gimple_stmt_iterator gsi = gsi_after_labels (block);
3283 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3285 VN_INFO_GET (temp)->value_id = val;
3286 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3287 if (VN_INFO (temp)->valnum == NULL_TREE)
3288 VN_INFO (temp)->valnum = temp;
3289 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3290 pre_expr newe = get_or_alloc_expr_for_name (temp);
3291 add_to_value (val, newe);
3292 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3293 bitmap_insert_into_set (NEW_SETS (block), newe);
3298 exprs.release ();
3299 return new_stuff;
3303 /* Perform insertion for partially anticipatable expressions. There
3304 is only one case we will perform insertion for these. This case is
3305 if the expression is partially anticipatable, and fully available.
3306 In this case, we know that putting it earlier will enable us to
3307 remove the later computation. */
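/* Illustration (not from the sources): if a + b has a leader in
   AVAIL_OUT of every predecessor but is only partially anticipated in
   BLOCK (say only one successor path recomputes it), a PHI merging the
   predecessors' leaders makes the value available in BLOCK, so the
   later recomputation can be eliminated.  */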
3309 static bool
3310 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3312 bool new_stuff = false;
3313 vec<pre_expr> exprs;
3314 pre_expr expr;
3315 auto_vec<pre_expr> avail;
3316 int i;
3318 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3319 avail.safe_grow (EDGE_COUNT (block->preds));
3321 FOR_EACH_VEC_ELT (exprs, i, expr)
3323 if (expr->kind == NARY
3324 || expr->kind == REFERENCE)
3326 unsigned int val;
3327 bool by_all = true;
3328 bool cant_insert = false;
3329 edge pred;
3330 basic_block bprime;
3331 pre_expr eprime = NULL;
3332 edge_iterator ei;
3334 val = get_expr_value_id (expr);
3335 if (bitmap_set_contains_value (PHI_GEN (block), val))
3336 continue;
3337 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3338 continue;
3340 FOR_EACH_EDGE (pred, ei, block->preds)
3342 unsigned int vprime;
3343 pre_expr edoubleprime;
3345 /* We should never run insertion for the exit block
3346 and so not come across fake pred edges. */
3347 gcc_assert (!(pred->flags & EDGE_FAKE));
3348 bprime = pred->src;
3349 eprime = phi_translate (expr, ANTIC_IN (block),
3350 PA_IN (block),
3351 bprime, block);
3353 /* eprime will generally only be NULL if the
3354 value of the expression, translated
3355 through the PHI for this predecessor, is
3356 undefined. If that is the case, we can't
3357 make the expression fully redundant,
3358 because its value is undefined along a
3359 predecessor path. We can thus break out
3360 early because it doesn't matter what the
3361 rest of the results are. */
3362 if (eprime == NULL)
3364 avail[pred->dest_idx] = NULL;
3365 cant_insert = true;
3366 break;
3369 vprime = get_expr_value_id (eprime);
3370 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3371 avail[pred->dest_idx] = edoubleprime;
3372 if (edoubleprime == NULL)
3374 by_all = false;
3375 break;
3379 /* If we can insert it and the value is
3380 available along every predecessor, it is a
3381 candidate for partial partial redundancy
3382 elimination. */
3383 if (!cant_insert && by_all)
3385 edge succ;
3386 bool do_insertion = false;
3388 /* Insert only if we can remove a later expression on a path
3389 that we want to optimize for speed.
3390 The phi node that we will be inserting in BLOCK is not free,
3391 and inserting it for the sake of a !optimize_for_speed successor
3392 may cause regressions on the speed path. */
3393 FOR_EACH_EDGE (succ, ei, block->succs)
3395 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3396 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3398 if (optimize_edge_for_speed_p (succ))
3399 do_insertion = true;
3403 if (!do_insertion)
3405 if (dump_file && (dump_flags & TDF_DETAILS))
3407 fprintf (dump_file, "Skipping partial partial redundancy "
3408 "for expression ");
3409 print_pre_expr (dump_file, expr);
3410 fprintf (dump_file, " (%04d), not (partially) anticipated "
3411 "on any to be optimized for speed edges\n", val);
3414 else if (dbg_cnt (treepre_insert))
3416 pre_stats.pa_insert++;
3417 if (dump_file && (dump_flags & TDF_DETAILS))
3419 fprintf (dump_file, "Found partial partial redundancy "
3420 "for expression ");
3421 print_pre_expr (dump_file, expr);
3422 fprintf (dump_file, " (%04d)\n",
3423 get_expr_value_id (expr));
3425 if (insert_into_preds_of_block (block,
3426 get_expression_id (expr),
3427 avail))
3428 new_stuff = true;
3434 exprs.release ();
3435 return new_stuff;
3438 /* Insert expressions in BLOCK to compute hoistable values up.
3439 Return TRUE if something was inserted, otherwise return FALSE.
3440 The caller has to make sure that BLOCK has at least two successors. */
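/* Worked example (not from the sources): for

     if (c_1)
       x_2 = a_3 * b_4;
     else
       y_5 = a_3 * b_4;

   a_3 * b_4 is ANTIC_IN at the condition block and AVAIL_OUT in both
   single-predecessor successors, so it is inserted once before the
   branch and the computations in both arms become fully redundant.  */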
3442 static bool
3443 do_hoist_insertion (basic_block block)
3445 edge e;
3446 edge_iterator ei;
3447 bool new_stuff = false;
3448 unsigned i;
3449 gimple_stmt_iterator last;
3451 /* At least two successors, or else... */
3452 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3454 /* Check that all successors of BLOCK are dominated by block.
3455 We could use dominated_by_p() for this, but actually there is a much
3456 quicker check: any successor that is dominated by BLOCK can't have
3457 more than one predecessor edge. */
3458 FOR_EACH_EDGE (e, ei, block->succs)
3459 if (! single_pred_p (e->dest))
3460 return false;
3462 /* Determine the insertion point. If we cannot safely insert before
3463 the last stmt if we'd have to, bail out. */
3464 last = gsi_last_bb (block);
3465 if (!gsi_end_p (last)
3466 && !is_ctrl_stmt (gsi_stmt (last))
3467 && stmt_ends_bb_p (gsi_stmt (last)))
3468 return false;
3470 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3471 hoistable values. */
3472 bitmap_set hoistable_set;
3474 /* A hoistable value must be in ANTIC_IN(block)
3475 but not in AVAIL_OUT(BLOCK). */
3476 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3477 bitmap_and_compl (&hoistable_set.values,
3478 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3480 /* Short-cut for a common case: hoistable_set is empty. */
3481 if (bitmap_empty_p (&hoistable_set.values))
3482 return false;
3484 /* Compute which of the hoistable values is in AVAIL_OUT of
3485 at least one of the successors of BLOCK. */
3486 bitmap_head availout_in_some;
3487 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3488 FOR_EACH_EDGE (e, ei, block->succs)
3489 /* Do not consider expressions solely because of their availability
3490 on loop exits. They'd be ANTIC-IN throughout the whole loop
3491 and thus effectively hoisted across loops by a combination of
3492 PRE and hoisting. */
3493 if (! loop_exit_edge_p (block->loop_father, e))
3494 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3495 &AVAIL_OUT (e->dest)->values);
3496 bitmap_clear (&hoistable_set.values);
3498 /* Short-cut for a common case: availout_in_some is empty. */
3499 if (bitmap_empty_p (&availout_in_some))
3500 return false;
3502 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3503 hoistable_set.values = availout_in_some;
3504 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3506 /* Now finally construct the topological-ordered expression set. */
3507 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3509 bitmap_clear (&hoistable_set.values);
3511 /* If there are candidate values for hoisting, insert expressions
3512 strategically to make the hoistable expressions fully redundant. */
3513 pre_expr expr;
3514 FOR_EACH_VEC_ELT (exprs, i, expr)
3516 /* While we try to sort expressions topologically above, the
3517 sorting doesn't work out perfectly. Catch expressions we
3518 have already inserted. */
3519 unsigned int value_id = get_expr_value_id (expr);
3520 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3522 if (dump_file && (dump_flags & TDF_DETAILS))
3524 fprintf (dump_file,
3525 "Already inserted expression for ");
3526 print_pre_expr (dump_file, expr);
3527 fprintf (dump_file, " (%04d)\n", value_id);
3529 continue;
3532 /* OK, we should hoist this value. Perform the transformation. */
3533 pre_stats.hoist_insert++;
3534 if (dump_file && (dump_flags & TDF_DETAILS))
3536 fprintf (dump_file,
3537 "Inserting expression in block %d for code hoisting: ",
3538 block->index);
3539 print_pre_expr (dump_file, expr);
3540 fprintf (dump_file, " (%04d)\n", value_id);
3543 gimple_seq stmts = NULL;
3544 tree res = create_expression_by_pieces (block, expr, &stmts,
3545 get_expr_type (expr));
3547 /* Do not return true if expression creation ultimately
3548 did not insert any statements. */
3549 if (gimple_seq_empty_p (stmts))
3550 res = NULL_TREE;
3551 else
3553 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3554 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3555 else
3556 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3559 /* Make sure to not return true if expression creation ultimately
3560 failed but also make sure to insert any stmts produced as they
3561 are tracked in inserted_exprs. */
3562 if (! res)
3563 continue;
3565 new_stuff = true;
3568 exprs.release ();
3570 return new_stuff;
3573 /* Do a dominator walk on the control flow graph, and insert computations
3574 of values as necessary for PRE and hoisting. */
3576 static bool
3577 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3579 basic_block son;
3580 bool new_stuff = false;
3582 if (block)
3584 basic_block dom;
3585 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3586 if (dom)
3588 unsigned i;
3589 bitmap_iterator bi;
3590 bitmap_set_t newset;
3592 /* First, update the AVAIL_OUT set with anything we may have
3593 inserted higher up in the dominator tree. */
3594 newset = NEW_SETS (dom);
3595 if (newset)
3597 /* Note that we need to value_replace both NEW_SETS and
3598 AVAIL_OUT. In both cases the value may already be
3599 represented by some non-simple expression that we
3600 want to replace. */
3601 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3603 pre_expr expr = expression_for_id (i);
3604 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3605 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3609 /* Insert expressions for partial redundancies. */
3610 if (do_pre && !single_pred_p (block))
3612 new_stuff |= do_pre_regular_insertion (block, dom);
3613 if (do_partial_partial)
3614 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3617 /* Insert expressions for hoisting. */
3618 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3619 new_stuff |= do_hoist_insertion (block);
3622 for (son = first_dom_son (CDI_DOMINATORS, block);
3623 son;
3624 son = next_dom_son (CDI_DOMINATORS, son))
3626 new_stuff |= insert_aux (son, do_pre, do_hoist);
3629 return new_stuff;
3632 /* Perform insertion of partially redundant and hoistable values. */
3634 static void
3635 insert (void)
3637 bool new_stuff = true;
3638 basic_block bb;
3639 int num_iterations = 0;
3641 FOR_ALL_BB_FN (bb, cfun)
3642 NEW_SETS (bb) = bitmap_set_new ();
3644 while (new_stuff)
3646 num_iterations++;
3647 if (dump_file && (dump_flags & TDF_DETAILS))
3648 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3649 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3650 flag_code_hoisting);
3652 /* Clear the NEW sets before the next iteration. We have already
3653 fully propagated their contents. */
3654 if (new_stuff)
3655 FOR_ALL_BB_FN (bb, cfun)
3656 bitmap_set_free (NEW_SETS (bb));
3658 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3662 /* Compute the AVAIL set for all basic blocks.
3664 This function performs value numbering of the statements in each basic
3665 block. The AVAIL sets are built from information we glean while doing
3666 this value numbering, since the AVAIL sets contain only one entry per
3667 value.
3669 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3670 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
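/* Illustration (hypothetical names): if B1 dominates B2 and B1
   computes t_1 = a + b, then t_1 is in TMP_GEN[B1], hence in
   AVAIL_OUT[B1], hence in AVAIL_IN[B2] -- so a later a + b in B2 has
   a leader and is fully redundant.  */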
3672 static void
3673 compute_avail (void)
3676 basic_block block, son;
3677 basic_block *worklist;
3678 size_t sp = 0;
3679 unsigned i;
3680 tree name;
3682 /* We pretend that default definitions are defined in the entry block.
3683 This includes function arguments and the static chain decl. */
3684 FOR_EACH_SSA_NAME (i, name, cfun)
3686 pre_expr e;
3687 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3688 || has_zero_uses (name)
3689 || virtual_operand_p (name))
3690 continue;
3692 e = get_or_alloc_expr_for_name (name);
3693 add_to_value (get_expr_value_id (e), e);
3694 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3695 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3699 if (dump_file && (dump_flags & TDF_DETAILS))
3701 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3702 "tmp_gen", ENTRY_BLOCK);
3703 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3704 "avail_out", ENTRY_BLOCK);
3707 /* Allocate the worklist. */
3708 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3710 /* Seed the algorithm by putting the dominator children of the entry
3711 block on the worklist. */
3712 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3713 son;
3714 son = next_dom_son (CDI_DOMINATORS, son))
3715 worklist[sp++] = son;
3717 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3718 = ssa_default_def (cfun, gimple_vop (cfun));
3720 /* Loop until the worklist is empty. */
3721 while (sp)
3723 gimple *stmt;
3724 basic_block dom;
3726 /* Pick a block from the worklist. */
3727 block = worklist[--sp];
3729 /* Initially, the set of available values in BLOCK is that of
3730 its immediate dominator. */
3731 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3732 if (dom)
3734 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3735 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3738 /* Generate values for PHI nodes. */
3739 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3740 gsi_next (&gsi))
3742 tree result = gimple_phi_result (gsi.phi ());
3744 /* We have no need for virtual phis, as they don't represent
3745 actual computations. */
3746 if (virtual_operand_p (result))
3748 BB_LIVE_VOP_ON_EXIT (block) = result;
3749 continue;
3752 pre_expr e = get_or_alloc_expr_for_name (result);
3753 add_to_value (get_expr_value_id (e), e);
3754 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3755 bitmap_insert_into_set (PHI_GEN (block), e);
3758 BB_MAY_NOTRETURN (block) = 0;
3760 /* Now compute value numbers and populate value sets with all
3761 the expressions computed in BLOCK. */
3762 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3763 gsi_next (&gsi))
3765 ssa_op_iter iter;
3766 tree op;
3768 stmt = gsi_stmt (gsi);
3770 /* Cache whether the basic-block has any non-visible side-effect
3771 or control flow.
3772 If this isn't a call or it is the last stmt in the
3773 basic-block then the CFG represents things correctly. */
3774 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3776 /* Non-looping const functions always return normally.
3777 Otherwise the call might not return or have side-effects
3778 that forbid hoisting possibly trapping expressions
3779 before it. */
3780 int flags = gimple_call_flags (stmt);
3781 if (!(flags & ECF_CONST)
3782 || (flags & ECF_LOOPING_CONST_OR_PURE))
3783 BB_MAY_NOTRETURN (block) = 1;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_vdef (stmt))
            BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (stmt)
              || is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            {
              if (ssa_undefined_value_p (op))
                continue;
              pre_expr e = get_or_alloc_expr_for_name (op);
              bitmap_value_insert_into_set (EXP_GEN (block), e);
            }
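
          /* Each SSA use recorded above is itself a value with a leader
             available at this point, so EXP_GEN ends up describing the
             expressions this block can provide to the ANTIC computation;
             undefined values are skipped as they carry no meaningful
             value number.  */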

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                vn_reference_s ref1;
                pre_expr result = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                    (gimple_vuse (stmt))) != block)
                  {
                    result = pre_expr_pool.allocate ();
                    result->kind = REFERENCE;
                    result->id = 0;
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }
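
            /* A call such as t_1 = bar (a_2) (illustrative only) whose
               virtual use is defined by a PHI or outside this block
               satisfies the check above: nothing earlier in the block can
               change its value, so it becomes a PRE candidate.  */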

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return, avoid
                         adding the NARY to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = pre_expr_pool.allocate ();
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }
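
                  /* E.g. (illustrative GIMPLE) in

                       foo ();
                       t_1 = a_2 / b_3;

                     the division may trap and foo may not return, so the
                     check above keeps the NARY out of EXP_GEN and out of
                     hoisting's reach.  */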

                  case VN_REFERENCE:
                    {
                      tree rhs1 = gimple_assign_rhs1 (stmt);
                      alias_set_type set = get_alias_set (rhs1);
                      vec<vn_reference_op_s> operands
                        = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
                      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                                  TREE_TYPE (rhs1),
                                                  operands, &ref, VN_WALK);
                      if (!ref)
                        {
                          operands.release ();
                          continue;
                        }

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple *def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            {
                              operands.release ();
                              continue;
                            }
                        }
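
                      /* E.g. (an illustrative sketch) for

                           *p_1 = 1;
                           t_2 = *q_3;

                         in one block, the store may clobber *q_3, so the
                         walk above refuses to add the load to EXP_GEN.  */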

                      /* If the load was value-numbered to another
                         load make sure we do not use its expression
                         for insertion if it wouldn't be a valid
                         replacement.  */
                      /* At the moment we have a testcase
                         for hoist insertion of aligned vs. misaligned
                         variants in gcc.dg/torture/pr65270-1.c, thus
                         with just alignment to be considered we can
                         simply replace the expression in the hashtable
                         with the most conservative one.  */
                      vn_reference_op_t ref1 = &ref->operands.last ();
                      while (ref1->opcode != TARGET_MEM_REF
                             && ref1->opcode != MEM_REF
                             && ref1 != &ref->operands[0])
                        --ref1;
                      vn_reference_op_t ref2 = &operands.last ();
                      while (ref2->opcode != TARGET_MEM_REF
                             && ref2->opcode != MEM_REF
                             && ref2 != &operands[0])
                        --ref2;
                      if ((ref1->opcode == TARGET_MEM_REF
                           || ref1->opcode == MEM_REF)
                          && (TYPE_ALIGN (ref1->type)
                              > TYPE_ALIGN (ref2->type)))
                        ref1->type
                          = build_aligned_type (ref1->type,
                                                TYPE_ALIGN (ref2->type));

                      /* TBAA behavior is an obvious part so make sure
                         that the hashtable one covers this as well
                         by adjusting the ref alias set and its base.  */
                      if (ref->set == set
                          || alias_set_subset_of (set, ref->set))
                        ;
                      else if (alias_set_subset_of (ref->set, set))
                        {
                          ref->set = set;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (TREE_TYPE (ref2->op0),
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (TREE_TYPE (ref2->op2),
                                                  wi::to_wide (ref1->op2));
                        }
                      else
                        {
                          ref->set = 0;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op2));
                        }
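
                      /* The net effect is that the hashtable expression
                         stays conservatively correct for both TBAA views:
                         when neither alias set includes the other we fall
                         back to alias set zero, which conflicts with
                         everything.  */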

                      operands.release ();

                      result = pre_expr_pool.allocate ();
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}

/* Cheap DCE of a known set of possibly dead stmts.

   Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
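
/* For example (hypothetical), insertion may create pretmp_1 = a_2 + b_3 in
   anticipation of a redundancy that elimination then resolves differently,
   leaving pretmp_1 without uses; the worklist walk below releases such
   statements and anything that becomes dead with them.  */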

static void
remove_dead_inserted_code (void)
{
  /* ???  Re-use inserted_exprs as worklist not only as initial set.
     This may end up removing non-inserted code as well.  If we
     keep inserted_exprs unchanged we could restrict new worklist
     elements to members of inserted_exprs.  */
  bitmap worklist = inserted_exprs;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing unnecessary insertion:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);
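
  /* do_partial_partial enables partial-partial redundancy elimination:
     roughly, insertion for expressions that are redundant along some but
     not all paths, which only pays off when optimizing for speed.  */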

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();

  run_scc_vn (VN_WALK);

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not)
     end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (inserted_exprs);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  clear_expression_ids ();

  scev_finalize ();
  remove_dead_inserted_code ();
  fini_pre ();
  loop_optimizer_finalize ();

  /* Restore SSA info before tail-merging as that resets it as well.  */
  scc_vn_restore_ssa_info ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE; this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}