/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dce.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	performs expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).  */
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining source of
      quadratic memory usage in GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety (availability)
   of the expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion of the value of that expression in that block, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
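
/* As an illustrative sketch (hypothetical GIMPLE, not actual pass output),
   consider a diamond in which only one arm computes a_1 + b_2:

	if (c_3) { x_4 = a_1 + b_2; }	<- value AVAIL on this arm
	else     { ... }		<- value not AVAIL on this arm
	y_5 = a_1 + b_2;		<- partially redundant

   The value is ANTIC in both predecessors of the join block but AVAIL in
   only one of them, so insertion places a computation in the arm lacking
   it and merges the results with a PHI, after which the last computation
   is fully redundant and removable by the elimination phase:

	if (c_3) { x_4 = a_1 + b_2; }
	else     { pretmp_6 = a_1 + b_2; }
	prephitmp_7 = PHI <x_4, pretmp_6>
	y_5 = prephitmp_7;  */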
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, fewer code size benefits, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */
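
/* A minimal hoisting sketch (hypothetical GIMPLE, not actual pass output):
   both arms of a diamond rooted at block B compute a_1 + b_2, so the value
   is in ANTIC_IN (B) but not in AVAIL_OUT (B), B dominates its two
   successors, and each successor has the value in its AVAIL_OUT.  Hoisting
   then inserts the computation once in B:

	B:  pretmp_9 = a_1 + b_2;	<- hoisted computation
	    if (c_3) goto B1; else goto B2;
	B1: x_4 = a_1 + b_2;		<- now fully redundant
	B2: y_5 = a_1 + b_2;		<- now fully redundant

   and the two copies in the arms are removed by the elimination phase.  */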
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
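
/* As an illustrative sketch (hypothetical SSA names and ids): if b_5 is
   found by SCCVN to compute the same value as a_2, both names share one
   value number with, say, value_id 7; every expression computing that
   value is then recorded in the bitmap value_expressions[7], and a fake
   SSA_NAME is only created once a representative is needed and no real
   SSA_NAME with that value exists.  */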
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
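
/* A small sketch of the representation (hypothetical ids): a set holding
   expressions e7 and e9 that share value_id 4 has bits 7 and 9 set in the
   expressions bitmap but only bit 4 set in the values bitmap.  Removing
   e7 therefore must not clear value 4 while e9 remains, which is why
   several routines below re-derive the values bitmap after removing
   expressions.  */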
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the expression id allocated for EXPR, or 0 if none exists.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
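
/* A minimal usage sketch for the iteration macros above, mirroring the
   walks performed by the routines below (the expression handling is left
   abstract):

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
	 pre_expr expr = expression_for_id (i);
	 ...
       }  */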
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
	 for the same value to appear in a set.  This is needed for
	 TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned to_remove = -1U;
  bitmap_and_compl_into (&a->values, &b->values);
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&a->expressions, to_remove);
	  to_remove = -1U;
	}
      pre_expr expr = expression_for_id (i);
      if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
	to_remove = i;
    }
  if (to_remove != -1U)
    bitmap_clear_bit (&a->expressions, to_remove);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      return;
	    }
	}
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

void debug_value_expressions (unsigned int);

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
		     bitmap_set_t set3 = NULL)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}
/* Get a representative SSA_NAME for a given expression that is available in B.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e, basic_block b = NULL)
{
  tree name, valnum = NULL_TREE;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      {
		tree name = PRE_EXPR_NAME (rep);
		valnum = VN_INFO (name)->valnum;
		gimple *def = SSA_NAME_DEF_STMT (name);
		/* We have to return either a new representative or one
		   that can be used for expression simplification and thus
		   is available in B.  */
		if (! b
		    || gimple_nop_p (def)
		    || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
		  return name;
	      }
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = valnum ? valnum : name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  basic_block pred = e->src;
  basic_block phiblock = e->dest;
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, e);
		if (result && result != leader)
		  /* Force a leader as well as we are simplifying this
		     expression.  */
		  newnary->op[i] = get_representative_for (result, pred);
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       to be inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (e->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (constant);
			constant = find_leader_in_sets (value_id, set1, set2,
							AVAIL_OUT (pred));
			if (constant)
			  return constant;
		      }
		  }
		else
		  return constant;
	      }

	    /* vn_nary_* do not valueize operands.  */
	    for (i = 0; i < newnary->length; ++i)
	      if (TREE_CODE (newnary->op[i]) == SSA_NAME)
		newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, e);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  break;
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      new_val_id = newref->value_id;
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
	      }
	    PRE_EXPR_REFERENCE (expr) = newref;
	    get_or_alloc_expression_id (expr);
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, e->src))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, e);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (e->dest)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, e);
      if (!translated)
	continue;

      bitmap_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Remove from SET1 the expressions that are no longer valid in SET1 or SET2.
   This means expressions that are made up of values we have no leaders for
   in SET1 or SET2.  */

static void
clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	{
	  unsigned int val = get_expr_value_id (expr);
	  bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
	  /* We are entered with possibly multiple expressions for a value
	     so before removing a value from the set see if there's an
	     expression for it left.  */
	  if (! bitmap_find_leader (set1, val))
	    bitmap_clear_bit (&set1->values, val);
	}
    }
  exprs.release ();
}
1915 /* Clean the set of expressions that are no longer valid in SET because
1916 they are clobbered in BLOCK or because they trap and may not be executed. */
1918 static void
1919 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1921 bitmap_iterator bi;
1922 unsigned i;
1923 unsigned to_remove = -1U;
1924 bool any_removed = false;
1926 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1928 /* Remove queued expr. */
1929 if (to_remove != -1U)
1931 bitmap_clear_bit (&set->expressions, to_remove);
1932 any_removed = true;
1933 to_remove = -1U;
1936 pre_expr expr = expression_for_id (i);
1937 if (expr->kind == REFERENCE)
1939 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1940 if (ref->vuse)
1942 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1943 if (!gimple_nop_p (def_stmt)
1944 && ((gimple_bb (def_stmt) != block
1945 && !dominated_by_p (CDI_DOMINATORS,
1946 block, gimple_bb (def_stmt)))
1947 || (gimple_bb (def_stmt) == block
1948 && value_dies_in_block_x (expr, block))))
1949 to_remove = i;
1952 else if (expr->kind == NARY)
1954 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1955 /* If the NARY may trap make sure the block does not contain
1956 a possible exit point.
1957 ??? This is overly conservative if we translate AVAIL_OUT,
1958 as the available expression might be after the exit point. */
1959 if (BB_MAY_NOTRETURN (block)
1960 && vn_nary_may_trap (nary))
1961 to_remove = i;
1965 /* Remove queued expr. */
1966 if (to_remove != -1U)
1968 bitmap_clear_bit (&set->expressions, to_remove);
1969 any_removed = true;
1972 /* Above we only removed expressions, now clean the set of values
1973 which no longer have any corresponding expression. We cannot
1974 clear the value at the time we remove an expression since there
1975 may be multiple expressions per value.
1976 If we queued the values that are possibly to be removed we could
1977 instead use bitmap_find_leader to check whether an expression is
1978 still left for each of them. Depending on the ratio of removed
1979 values to the number of values/expressions in the set this might
1980 be faster than rebuilding the value-set. */
1981 if (any_removed)
1983 bitmap_clear (&set->values);
1984 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1986 pre_expr expr = expression_for_id (i);
1987 unsigned int value_id = get_expr_value_id (expr);
1988 bitmap_set_bit (&set->values, value_id);
1993 static sbitmap has_abnormal_preds;
1995 /* Compute the ANTIC set for BLOCK.
1997 If succs(BLOCK) > 1 then
1998 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1999 else if succs(BLOCK) == 1 then
2000 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2002 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2004 Note that clean() is deferred until after the iteration. */
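/* A worked example of the equations above (illustrative only,
   hypothetical SSA names):

	  B1
	 /  \
	B2   B3		B2 computes a_1 + b_2, B3 does not
	 \  /
	  B4		B4 computes a_1 + b_2

   Iterating in RPO on the inverted CFG, EXP_GEN (B4) puts the value of
   a_1 + b_2 into ANTIC_IN (B4); B2 and B3 each have the single
   successor B4, so the value propagates into their ANTIC_IN and, via
   the intersection over B1's successors, into ANTIC_IN (B1).
   Insertion can then add a_1 + b_2 to B3, making the computation in B4
   fully redundant.  */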
2006 static bool
2007 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2009 bitmap_set_t S, old, ANTIC_OUT;
2010 edge e;
2011 edge_iterator ei;
2013 bool was_visited = BB_VISITED (block);
2014 bool changed = ! BB_VISITED (block);
2015 BB_VISITED (block) = 1;
2016 old = ANTIC_OUT = S = NULL;
2018 /* If any edges from predecessors are abnormal, antic_in is empty,
2019 so do nothing. */
2020 if (block_has_abnormal_pred_edge)
2021 goto maybe_dump_sets;
2023 old = ANTIC_IN (block);
2024 ANTIC_OUT = bitmap_set_new ();
2026 /* If the block has no successors, ANTIC_OUT is empty. */
2027 if (EDGE_COUNT (block->succs) == 0)
2029 /* If we have one successor, we could have some phi nodes to
2030 translate through. */
2031 else if (single_succ_p (block))
2033 e = single_succ_edge (block);
2034 gcc_assert (BB_VISITED (e->dest));
2035 phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e);
2037 /* If we have multiple successors, we take the intersection of all of
2038 them. Note that in the case of loop exit phi nodes, we may have
2039 phis to translate through. */
2040 else
2042 size_t i;
2043 edge first = NULL;
2045 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2046 FOR_EACH_EDGE (e, ei, block->succs)
2048 if (!first
2049 && BB_VISITED (e->dest))
2050 first = e;
2051 else if (BB_VISITED (e->dest))
2052 worklist.quick_push (e);
2053 else
2055 /* Unvisited successors get their ANTIC_IN replaced by the
2056 maximal set to arrive at a maximum ANTIC_IN solution.
2057 We can ignore them in the intersection operation and thus
2058 need not explicitly represent that maximum solution. */
2059 if (dump_file && (dump_flags & TDF_DETAILS))
2060 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2061 e->src->index, e->dest->index);
2065 /* With multiple successors we must have visited at least one
2066 already, which is guaranteed by the iteration order. */
2067 gcc_assert (first != NULL);
2069 phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first);
2071 /* If we have multiple successors we need to intersect the ANTIC_OUT
2072 sets. For values that's a simple intersection but for
2073 expressions it is a union. Given we want to have a single
2074 expression per value in our sets we have to canonicalize.
2075 Avoid randomness and running into cycles like for PR82129 and
2076 canonicalize the expression we choose to the one with the
2077 lowest id. This requires we actually compute the union first. */
2078 FOR_EACH_VEC_ELT (worklist, i, e)
2080 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2082 bitmap_set_t tmp = bitmap_set_new ();
2083 phi_translate_set (tmp, ANTIC_IN (e->dest), e);
2084 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2085 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2086 bitmap_set_free (tmp);
2088 else
2090 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
2091 bitmap_ior_into (&ANTIC_OUT->expressions,
2092 &ANTIC_IN (e->dest)->expressions);
2095 if (! worklist.is_empty ())
2097 /* Prune expressions not in the value set. */
2098 bitmap_iterator bi;
2099 unsigned int i;
2100 unsigned int to_clear = -1U;
2101 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2103 if (to_clear != -1U)
2105 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2106 to_clear = -1U;
2108 pre_expr expr = expression_for_id (i);
2109 unsigned int value_id = get_expr_value_id (expr);
2110 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
2111 to_clear = i;
2113 if (to_clear != -1U)
2114 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2118 /* Prune expressions that are clobbered in block and thus become
2119 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2120 prune_clobbered_mems (ANTIC_OUT, block);
2122 /* Generate ANTIC_OUT - TMP_GEN. */
2123 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2125 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2126 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2127 TMP_GEN (block));
2129 /* Then union in the ANTIC_OUT - TMP_GEN values,
2130 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2131 bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
2132 bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);
2134 /* clean (ANTIC_IN (block)) is deferred until after the iteration
2135 has converged because it can cause non-convergence, see for example PR81181. */
2137 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2139 changed = true;
2140 /* After the initial value set computation the value set may
2141 only shrink during the iteration. */
2142 if (was_visited && flag_checking)
2144 bitmap_iterator bi;
2145 unsigned int i;
2146 EXECUTE_IF_AND_COMPL_IN_BITMAP (&ANTIC_IN (block)->values,
2147 &old->values, 0, i, bi)
2148 gcc_unreachable ();
2152 maybe_dump_sets:
2153 if (dump_file && (dump_flags & TDF_DETAILS))
2155 if (ANTIC_OUT)
2156 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2158 if (changed)
2159 fprintf (dump_file, "[changed] ");
2160 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2161 block->index);
2163 if (S)
2164 print_bitmap_set (dump_file, S, "S", block->index);
2166 if (old)
2167 bitmap_set_free (old);
2168 if (S)
2169 bitmap_set_free (S);
2170 if (ANTIC_OUT)
2171 bitmap_set_free (ANTIC_OUT);
2172 return changed;
2175 /* Compute PARTIAL_ANTIC for BLOCK.
2177 If succs(BLOCK) > 1 then
2178 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2179 in ANTIC_OUT for all succ(BLOCK)
2180 else if succs(BLOCK) == 1 then
2181 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2183 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
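/* Sketch of the difference from full anticipation (hypothetical
   example): if successor S1 anticipates a_1 + b_2 but successor S2
   does not, the intersection used for ANTIC_OUT drops the value at the
   fork, while the union used for PA_OUT keeps it, recording that
   insertion may still pay off on the path through S1.  */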
2186 static void
2187 compute_partial_antic_aux (basic_block block,
2188 bool block_has_abnormal_pred_edge)
2190 bitmap_set_t old_PA_IN;
2191 bitmap_set_t PA_OUT;
2192 edge e;
2193 edge_iterator ei;
2194 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2196 old_PA_IN = PA_OUT = NULL;
2198 /* If any edges from predecessors are abnormal, PA_IN is empty,
2199 so do nothing. */
2200 if (block_has_abnormal_pred_edge)
2201 goto maybe_dump_sets;
2203 /* If there are too many partially anticipatable values in the
2204 block, phi_translate_set can take exponential time: stop
2205 before the translation starts. */
2206 if (max_pa
2207 && single_succ_p (block)
2208 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2209 goto maybe_dump_sets;
2211 old_PA_IN = PA_IN (block);
2212 PA_OUT = bitmap_set_new ();
2214 /* If the block has no successors, PA_OUT is empty. */
2215 if (EDGE_COUNT (block->succs) == 0)
2217 /* If we have one successor, we could have some phi nodes to
2218 translate through. Note that we can't phi translate across DFS
2219 back edges in partial antic, because it uses a union operation on
2220 the successors. For recurrences like IVs, we would end up
2221 generating a new value in the set on each go around (i + 3 (VH.1),
2222 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2223 else if (single_succ_p (block))
2225 e = single_succ_edge (block);
2226 if (!(e->flags & EDGE_DFS_BACK))
2227 phi_translate_set (PA_OUT, PA_IN (e->dest), e);
2229 /* If we have multiple successors, we take the union of all of
2230 them. */
2231 else
2233 size_t i;
2235 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2236 FOR_EACH_EDGE (e, ei, block->succs)
2238 if (e->flags & EDGE_DFS_BACK)
2239 continue;
2240 worklist.quick_push (e);
2242 if (worklist.length () > 0)
2244 FOR_EACH_VEC_ELT (worklist, i, e)
2246 unsigned int i;
2247 bitmap_iterator bi;
2249 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
2250 bitmap_value_insert_into_set (PA_OUT,
2251 expression_for_id (i));
2252 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2254 bitmap_set_t pa_in = bitmap_set_new ();
2255 phi_translate_set (pa_in, PA_IN (e->dest), e);
2256 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2257 bitmap_value_insert_into_set (PA_OUT,
2258 expression_for_id (i));
2259 bitmap_set_free (pa_in);
2261 else
2262 FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
2263 bitmap_value_insert_into_set (PA_OUT,
2264 expression_for_id (i));
2269 /* Prune expressions that are clobbered in block and thus become
2270 invalid if translated from PA_OUT to PA_IN. */
2271 prune_clobbered_mems (PA_OUT, block);
2273 /* PA_IN starts with PA_OUT - TMP_GEN.
2274 Then we subtract things from ANTIC_IN. */
2275 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2277 /* For partial antic, we want to put back in the phi results, since
2278 we will properly avoid making them partially antic over backedges. */
2279 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2280 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2282 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2283 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2285 clean (PA_IN (block), ANTIC_IN (block));
2287 maybe_dump_sets:
2288 if (dump_file && (dump_flags & TDF_DETAILS))
2290 if (PA_OUT)
2291 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2293 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2295 if (old_PA_IN)
2296 bitmap_set_free (old_PA_IN);
2297 if (PA_OUT)
2298 bitmap_set_free (PA_OUT);
2301 /* Compute ANTIC and partial ANTIC sets. */
2303 static void
2304 compute_antic (void)
2306 bool changed = true;
2307 int num_iterations = 0;
2308 basic_block block;
2309 int i;
2310 edge_iterator ei;
2311 edge e;
2313 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2314 We pre-build the map of blocks with incoming abnormal edges here. */
2315 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2316 bitmap_clear (has_abnormal_preds);
2318 FOR_ALL_BB_FN (block, cfun)
2320 BB_VISITED (block) = 0;
2322 FOR_EACH_EDGE (e, ei, block->preds)
2323 if (e->flags & EDGE_ABNORMAL)
2325 bitmap_set_bit (has_abnormal_preds, block->index);
2326 break;
2329 /* While we are here, give empty ANTIC_IN sets to each block. */
2330 ANTIC_IN (block) = bitmap_set_new ();
2331 if (do_partial_partial)
2332 PA_IN (block) = bitmap_set_new ();
2335 /* At the exit block we anticipate nothing. */
2336 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2338 /* For ANTIC computation we need a postorder that also guarantees that
2339 a block with a single successor is visited after its successor.
2340 RPO on the inverted CFG has this property. */
2341 auto_vec<int, 20> postorder;
2342 inverted_post_order_compute (&postorder);
2344 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2345 bitmap_clear (worklist);
2346 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2347 bitmap_set_bit (worklist, e->src->index);
2348 while (changed)
2350 if (dump_file && (dump_flags & TDF_DETAILS))
2351 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2352 /* ??? We need to clear our PHI translation cache here as the
2353 ANTIC sets shrink and we restrict valid translations to
2354 those having operands with leaders in ANTIC. Same below
2355 for PA ANTIC computation. */
2356 num_iterations++;
2357 changed = false;
2358 for (i = postorder.length () - 1; i >= 0; i--)
2360 if (bitmap_bit_p (worklist, postorder[i]))
2362 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2363 bitmap_clear_bit (worklist, block->index);
2364 if (compute_antic_aux (block,
2365 bitmap_bit_p (has_abnormal_preds,
2366 block->index)))
2368 FOR_EACH_EDGE (e, ei, block->preds)
2369 bitmap_set_bit (worklist, e->src->index);
2370 changed = true;
2374 /* Theoretically possible, but *highly* unlikely. */
2375 gcc_checking_assert (num_iterations < 500);
2378 /* We have to clean after the dataflow problem converged as cleaning
2379 can cause non-convergence because it is based on expressions
2380 rather than values. */
2381 FOR_EACH_BB_FN (block, cfun)
2382 clean (ANTIC_IN (block));
2384 statistics_histogram_event (cfun, "compute_antic iterations",
2385 num_iterations);
2387 if (do_partial_partial)
2389 /* For partial antic we ignore backedges and thus we do not need
2390 to perform any iteration when we process blocks in postorder. */
2391 int postorder_num
2392 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2393 for (i = postorder_num - 1 ; i >= 0; i--)
2395 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2396 compute_partial_antic_aux (block,
2397 bitmap_bit_p (has_abnormal_preds,
2398 block->index));
2402 sbitmap_free (has_abnormal_preds);
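/* A minimal, self-contained sketch of the iteration scheme above,
   assuming one expression per value so the value/expression split
   disappears; sets are modelled as bitmasks over at most 32 values
   over a hypothetical four-block CFG described by succ[][].  Kept
   under #if 0 since it is illustration only, not part of the pass.  */
#if 0
static void
antic_fixpoint_sketch (unsigned antic_in[4], unsigned exp_gen[4],
                       unsigned tmp_gen[4], int succ[4][4])
{
  int changed = 1;
  while (changed)
    {
      changed = 0;
      /* Reverse postorder on the inverted CFG: successors first.  */
      for (int b = 3; b >= 0; --b)
        {
          int first = 1;
          unsigned antic_out = 0;
          for (int s = 0; s < 4; ++s)
            if (succ[b][s])
              {
                /* Intersect ANTIC_IN over all successors; a block
                   without successors keeps the empty set.  */
                antic_out = first ? antic_in[s] : (antic_out & antic_in[s]);
                first = 0;
              }
          /* ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN.  */
          unsigned new_in = (antic_out | exp_gen[b]) & ~tmp_gen[b];
          if (new_in != antic_in[b])
            {
              antic_in[b] = new_in;
              changed = 1;
            }
        }
    }
}
#endif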
2406 /* Inserted expressions are placed onto this worklist, which is used
2407 for performing quick dead code elimination of insertions we made
2408 that didn't turn out to be necessary. */
2409 static bitmap inserted_exprs;
2411 /* The actual worker for create_component_ref_by_pieces. */
2413 static tree
2414 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2415 unsigned int *operand, gimple_seq *stmts)
2417 vn_reference_op_t currop = &ref->operands[*operand];
2418 tree genop;
2419 ++*operand;
2420 switch (currop->opcode)
2422 case CALL_EXPR:
2423 gcc_unreachable ();
2425 case MEM_REF:
2427 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2428 stmts);
2429 if (!baseop)
2430 return NULL_TREE;
2431 tree offset = currop->op0;
2432 if (TREE_CODE (baseop) == ADDR_EXPR
2433 && handled_component_p (TREE_OPERAND (baseop, 0)))
2435 poly_int64 off;
2436 tree base;
2437 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2438 &off);
2439 gcc_assert (base);
2440 offset = int_const_binop (PLUS_EXPR, offset,
2441 build_int_cst (TREE_TYPE (offset),
2442 off));
2443 baseop = build_fold_addr_expr (base);
2445 genop = build2 (MEM_REF, currop->type, baseop, offset);
2446 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2447 MR_DEPENDENCE_BASE (genop) = currop->base;
2448 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2449 return genop;
2452 case TARGET_MEM_REF:
2454 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2455 vn_reference_op_t nextop = &ref->operands[++*operand];
2456 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2457 stmts);
2458 if (!baseop)
2459 return NULL_TREE;
2460 if (currop->op0)
2462 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2463 if (!genop0)
2464 return NULL_TREE;
2466 if (nextop->op0)
2468 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2469 if (!genop1)
2470 return NULL_TREE;
2472 genop = build5 (TARGET_MEM_REF, currop->type,
2473 baseop, currop->op2, genop0, currop->op1, genop1);
2475 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2476 MR_DEPENDENCE_BASE (genop) = currop->base;
2477 return genop;
2480 case ADDR_EXPR:
2481 if (currop->op0)
2483 gcc_assert (is_gimple_min_invariant (currop->op0));
2484 return currop->op0;
2486 /* Fallthrough. */
2487 case REALPART_EXPR:
2488 case IMAGPART_EXPR:
2489 case VIEW_CONVERT_EXPR:
2491 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2492 stmts);
2493 if (!genop0)
2494 return NULL_TREE;
2495 return fold_build1 (currop->opcode, currop->type, genop0);
2498 case WITH_SIZE_EXPR:
2500 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2501 stmts);
2502 if (!genop0)
2503 return NULL_TREE;
2504 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2505 if (!genop1)
2506 return NULL_TREE;
2507 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2510 case BIT_FIELD_REF:
2512 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2513 stmts);
2514 if (!genop0)
2515 return NULL_TREE;
2516 tree op1 = currop->op0;
2517 tree op2 = currop->op1;
2518 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2519 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2520 return fold (t);
2523 /* For array ref vn_reference_op's, operand 1 of the array ref
2524 (the index) is op0 of the reference op, operand 2 (the low bound)
2525 is op1 and operand 3 (the element size) is op2. */
2526 case ARRAY_RANGE_REF:
2527 case ARRAY_REF:
2529 tree genop0;
2530 tree genop1 = currop->op0;
2531 tree genop2 = currop->op1;
2532 tree genop3 = currop->op2;
2533 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2534 stmts);
2535 if (!genop0)
2536 return NULL_TREE;
2537 genop1 = find_or_generate_expression (block, genop1, stmts);
2538 if (!genop1)
2539 return NULL_TREE;
2540 if (genop2)
2542 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2543 /* Drop zero minimum index if redundant. */
2544 if (integer_zerop (genop2)
2545 && (!domain_type
2546 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2547 genop2 = NULL_TREE;
2548 else
2550 genop2 = find_or_generate_expression (block, genop2, stmts);
2551 if (!genop2)
2552 return NULL_TREE;
2555 if (genop3)
2557 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2558 /* We can't always put a size in units of the element alignment
2559 here as the element alignment may not be visible. See
2560 PR43783. Simply drop the element size for constant
2561 sizes. */
2562 if (TREE_CODE (genop3) == INTEGER_CST
2563 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2564 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2565 (wi::to_offset (genop3)
2566 * vn_ref_op_align_unit (currop))))
2567 genop3 = NULL_TREE;
2568 else
2570 genop3 = find_or_generate_expression (block, genop3, stmts);
2571 if (!genop3)
2572 return NULL_TREE;
2575 return build4 (currop->opcode, currop->type, genop0, genop1,
2576 genop2, genop3);
2578 case COMPONENT_REF:
2580 tree op0;
2581 tree op1;
2582 tree genop2 = currop->op1;
2583 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2584 if (!op0)
2585 return NULL_TREE;
2586 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2587 op1 = currop->op0;
2588 if (genop2)
2590 genop2 = find_or_generate_expression (block, genop2, stmts);
2591 if (!genop2)
2592 return NULL_TREE;
2594 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2597 case SSA_NAME:
2599 genop = find_or_generate_expression (block, currop->op0, stmts);
2600 return genop;
2602 case STRING_CST:
2603 case INTEGER_CST:
2604 case COMPLEX_CST:
2605 case VECTOR_CST:
2606 case REAL_CST:
2607 case CONSTRUCTOR:
2608 case VAR_DECL:
2609 case PARM_DECL:
2610 case CONST_DECL:
2611 case RESULT_DECL:
2612 case FUNCTION_DECL:
2613 return currop->op0;
2615 default:
2616 gcc_unreachable ();
2620 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2621 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2622 trying to rename aggregates into ssa form directly, which is a no-no.
2624 Thus, this routine doesn't create temporaries, it just builds a
2625 single access expression for the array, calling
2626 find_or_generate_expression to build the innermost pieces.
2628 This function is a subroutine of create_expression_by_pieces, and
2629 should not be called on its own unless you really know what you
2630 are doing. */
2632 static tree
2633 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2634 gimple_seq *stmts)
2636 unsigned int op = 0;
2637 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2640 /* Find a simple leader for an expression, or generate one using
2641 create_expression_by_pieces from a NARY expression for the value.
2642 BLOCK is the basic_block we are looking for leaders in.
2643 OP is the tree expression to find a leader for or generate.
2644 Returns the leader or NULL_TREE on failure. */
2646 static tree
2647 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2649 pre_expr expr = get_or_alloc_expr_for (op);
2650 unsigned int lookfor = get_expr_value_id (expr);
2651 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2652 if (leader)
2654 if (leader->kind == NAME)
2655 return PRE_EXPR_NAME (leader);
2656 else if (leader->kind == CONSTANT)
2657 return PRE_EXPR_CONSTANT (leader);
2659 /* Defer. */
2660 return NULL_TREE;
2663 /* It must be a complex expression, so generate it recursively. Note
2664 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2665 where the insert algorithm fails to insert a required expression. */
2666 bitmap exprset = value_expressions[lookfor];
2667 bitmap_iterator bi;
2668 unsigned int i;
2669 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2671 pre_expr temp = expression_for_id (i);
2672 /* We cannot insert random REFERENCE expressions at arbitrary
2673 places. We can insert NARYs which eventually re-materialize
2674 their operand values. */
2675 if (temp->kind == NARY)
2676 return create_expression_by_pieces (block, temp, stmts,
2677 get_expr_type (expr));
2680 /* Defer. */
2681 return NULL_TREE;
2684 /* Create an expression in pieces, so that we can handle very complex
2685 expressions that may be ANTIC, but not necessarily GIMPLE.
2686 BLOCK is the basic block the expression will be inserted into,
2687 EXPR is the expression to insert (in value form)
2688 STMTS is a statement list to append the necessary insertions into.
2690 This function will die if we hit some value that shouldn't be
2691 ANTIC but is (i.e. there is no leader for it, or for its components).
2692 The function returns NULL_TREE in case a different antic expression
2693 has to be inserted first.
2694 This function may also generate expressions that are themselves
2695 partially or fully redundant. Those that are will be either made
2696 fully redundant during the next iteration of insert (for partially
2697 redundant ones), or eliminated by eliminate (for fully redundant
2698 ones). */
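/* For example (hypothetical value numbers and names): to materialize
   the NARY {MULT_EXPR, V1, V2} with V1 = {a_1 + b_2} and V2 = {c_3},
   find_or_generate_expression is called for each operand -- possibly
   inserting t_4 = a_1 + b_2 itself -- and then pretmp_5 = t_4 * c_3 is
   emitted, with both new names added to NEW_SETS and AVAIL_OUT of
   BLOCK so later insertions can reuse them.  */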
2700 static tree
2701 create_expression_by_pieces (basic_block block, pre_expr expr,
2702 gimple_seq *stmts, tree type)
2704 tree name;
2705 tree folded;
2706 gimple_seq forced_stmts = NULL;
2707 unsigned int value_id;
2708 gimple_stmt_iterator gsi;
2709 tree exprtype = type ? type : get_expr_type (expr);
2710 pre_expr nameexpr;
2711 gassign *newstmt;
2713 switch (expr->kind)
2715 /* We may hit the NAME/CONSTANT case if we have to convert types
2716 that value numbering saw through. */
2717 case NAME:
2718 folded = PRE_EXPR_NAME (expr);
2719 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
2720 return NULL_TREE;
2721 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2722 return folded;
2723 break;
2724 case CONSTANT:
2726 folded = PRE_EXPR_CONSTANT (expr);
2727 tree tem = fold_convert (exprtype, folded);
2728 if (is_gimple_min_invariant (tem))
2729 return tem;
2730 break;
2732 case REFERENCE:
2733 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2735 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2736 unsigned int operand = 1;
2737 vn_reference_op_t currop = &ref->operands[0];
2738 tree sc = NULL_TREE;
2739 tree fn;
2740 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2741 fn = currop->op0;
2742 else
2743 fn = find_or_generate_expression (block, currop->op0, stmts);
2744 if (!fn)
2745 return NULL_TREE;
2746 if (currop->op1)
2748 sc = find_or_generate_expression (block, currop->op1, stmts);
2749 if (!sc)
2750 return NULL_TREE;
2752 auto_vec<tree> args (ref->operands.length () - 1);
2753 while (operand < ref->operands.length ())
2755 tree arg = create_component_ref_by_pieces_1 (block, ref,
2756 &operand, stmts);
2757 if (!arg)
2758 return NULL_TREE;
2759 args.quick_push (arg);
2761 gcall *call
2762 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2763 ? build_fold_addr_expr (fn) : fn), args);
2764 gimple_call_set_with_bounds (call, currop->with_bounds);
2765 if (sc)
2766 gimple_call_set_chain (call, sc);
2767 tree forcedname = make_ssa_name (currop->type);
2768 gimple_call_set_lhs (call, forcedname);
2769 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2770 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2771 folded = forcedname;
2773 else
2775 folded = create_component_ref_by_pieces (block,
2776 PRE_EXPR_REFERENCE (expr),
2777 stmts);
2778 if (!folded)
2779 return NULL_TREE;
2780 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2781 newstmt = gimple_build_assign (name, folded);
2782 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2783 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2784 folded = name;
2786 break;
2787 case NARY:
2789 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2790 tree *genop = XALLOCAVEC (tree, nary->length);
2791 unsigned i;
2792 for (i = 0; i < nary->length; ++i)
2794 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2795 if (!genop[i])
2796 return NULL_TREE;
2797 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2798 may have conversions stripped. */
2799 if (nary->opcode == POINTER_PLUS_EXPR)
2801 if (i == 0)
2802 genop[i] = gimple_convert (&forced_stmts,
2803 nary->type, genop[i]);
2804 else if (i == 1)
2805 genop[i] = gimple_convert (&forced_stmts,
2806 sizetype, genop[i]);
2808 else
2809 genop[i] = gimple_convert (&forced_stmts,
2810 TREE_TYPE (nary->op[i]), genop[i]);
2812 if (nary->opcode == CONSTRUCTOR)
2814 vec<constructor_elt, va_gc> *elts = NULL;
2815 for (i = 0; i < nary->length; ++i)
2816 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2817 folded = build_constructor (nary->type, elts);
2818 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2819 newstmt = gimple_build_assign (name, folded);
2820 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2821 folded = name;
2823 else
2825 switch (nary->length)
2827 case 1:
2828 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2829 genop[0]);
2830 break;
2831 case 2:
2832 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2833 genop[0], genop[1]);
2834 break;
2835 case 3:
2836 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2837 genop[0], genop[1], genop[2]);
2838 break;
2839 default:
2840 gcc_unreachable ();
2844 break;
2845 default:
2846 gcc_unreachable ();
2849 folded = gimple_convert (&forced_stmts, exprtype, folded);
2851 /* If there is nothing to insert, return the simplified result. */
2852 if (gimple_seq_empty_p (forced_stmts))
2853 return folded;
2854 /* If we simplified to a constant, return it and discard any
2855 stmts we may have built. */
2856 if (is_gimple_min_invariant (folded))
2858 gimple_seq_discard (forced_stmts);
2859 return folded;
2861 /* Likewise if we simplified to something not queued for insertion. */
2862 bool found = false;
2863 gsi = gsi_last (forced_stmts);
2864 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2866 gimple *stmt = gsi_stmt (gsi);
2867 tree forcedname = gimple_get_lhs (stmt);
2868 if (forcedname == folded)
2870 found = true;
2871 break;
2874 if (! found)
2876 gimple_seq_discard (forced_stmts);
2877 return folded;
2879 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2881 /* If we have any intermediate expressions, add them to the
2882 value sets and chain them into the instruction stream. */
2883 if (forced_stmts)
2885 gsi = gsi_start (forced_stmts);
2886 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2888 gimple *stmt = gsi_stmt (gsi);
2889 tree forcedname = gimple_get_lhs (stmt);
2890 pre_expr nameexpr;
2892 if (forcedname != folded)
2894 VN_INFO_GET (forcedname)->valnum = forcedname;
2895 VN_INFO (forcedname)->value_id = get_next_value_id ();
2896 nameexpr = get_or_alloc_expr_for_name (forcedname);
2897 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2898 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2899 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2902 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2904 gimple_seq_add_seq (stmts, forced_stmts);
2907 name = folded;
2909 /* Fold the last statement. */
2910 gsi = gsi_last (*stmts);
2911 if (fold_stmt_inplace (&gsi))
2912 update_stmt (gsi_stmt (gsi));
2914 /* Add a value number to the temporary.
2915 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2916 we are creating the expression by pieces, and this particular piece of
2917 the expression may have been represented. There is no harm in replacing
2918 here. */
2919 value_id = get_expr_value_id (expr);
2920 VN_INFO_GET (name)->value_id = value_id;
2921 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2922 if (VN_INFO (name)->valnum == NULL_TREE)
2923 VN_INFO (name)->valnum = name;
2924 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2925 nameexpr = get_or_alloc_expr_for_name (name);
2926 add_to_value (value_id, nameexpr);
2927 if (NEW_SETS (block))
2928 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2929 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2931 pre_stats.insertions++;
2932 if (dump_file && (dump_flags & TDF_DETAILS))
2934 fprintf (dump_file, "Inserted ");
2935 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2936 fprintf (dump_file, " in predecessor %d (%04d)\n",
2937 block->index, value_id);
2940 return name;
2944 /* Insert the to-be-made-available values of expression EXPRNUM for each
2945 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2946 merge the result with a phi node, given the same value number as
2947 the expression for EXPRNUM. Return true if we have inserted new stuff. */
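/* Continuing the diamond sketch from compute_antic_aux (hypothetical
   names): with a_1 + b_2 available as t_4 on one incoming edge and
   freshly inserted as pretmp_5 on the other, we create
   prephitmp_6 = PHI <t_4, pretmp_5> in BLOCK and give it the value
   number of the expression, turning the partial redundancy into a
   full one that elimination can remove.  */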
2949 static bool
2950 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2951 vec<pre_expr> avail)
2953 pre_expr expr = expression_for_id (exprnum);
2954 pre_expr newphi;
2955 unsigned int val = get_expr_value_id (expr);
2956 edge pred;
2957 bool insertions = false;
2958 bool nophi = false;
2959 basic_block bprime;
2960 pre_expr eprime;
2961 edge_iterator ei;
2962 tree type = get_expr_type (expr);
2963 tree temp;
2964 gphi *phi;
2966 /* Make sure we aren't creating an induction variable. */
2967 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2969 bool firstinsideloop = false;
2970 bool secondinsideloop = false;
2971 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2972 EDGE_PRED (block, 0)->src);
2973 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2974 EDGE_PRED (block, 1)->src);
2975 /* Induction variables only have one edge inside the loop. */
2976 if ((firstinsideloop ^ secondinsideloop)
2977 && expr->kind != REFERENCE)
2979 if (dump_file && (dump_flags & TDF_DETAILS))
2980 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2981 nophi = true;
2985 /* Make the necessary insertions. */
2986 FOR_EACH_EDGE (pred, ei, block->preds)
2988 gimple_seq stmts = NULL;
2989 tree builtexpr;
2990 bprime = pred->src;
2991 eprime = avail[pred->dest_idx];
2992 builtexpr = create_expression_by_pieces (bprime, eprime,
2993 &stmts, type);
2994 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
2995 if (!gimple_seq_empty_p (stmts))
2997 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
2998 gcc_assert (! new_bb);
2999 insertions = true;
3001 if (!builtexpr)
3003 /* We cannot insert a PHI node if we failed to insert
3004 on one edge. */
3005 nophi = true;
3006 continue;
3008 if (is_gimple_min_invariant (builtexpr))
3009 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3010 else
3011 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3013 /* If we didn't want a phi node, and we made insertions, we still have
3014 inserted new stuff, and thus return true. If we didn't want a phi node,
3015 and didn't make insertions, we haven't added anything new, so return
3016 false. */
3017 if (nophi && insertions)
3018 return true;
3019 else if (nophi && !insertions)
3020 return false;
3022 /* Now build a phi for the new variable. */
3023 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3024 phi = create_phi_node (temp, block);
3026 VN_INFO_GET (temp)->value_id = val;
3027 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3028 if (VN_INFO (temp)->valnum == NULL_TREE)
3029 VN_INFO (temp)->valnum = temp;
3030 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3031 FOR_EACH_EDGE (pred, ei, block->preds)
3033 pre_expr ae = avail[pred->dest_idx];
3034 gcc_assert (get_expr_type (ae) == type
3035 || useless_type_conversion_p (type, get_expr_type (ae)));
3036 if (ae->kind == CONSTANT)
3037 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3038 pred, UNKNOWN_LOCATION);
3039 else
3040 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3043 newphi = get_or_alloc_expr_for_name (temp);
3044 add_to_value (val, newphi);
3046 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3047 this insertion, since we test for the existence of this value in PHI_GEN
3048 before proceeding with the partial redundancy checks in insert_aux.
3050 The value may exist in AVAIL_OUT, in particular, it could be represented
3051 by the expression we are trying to eliminate, in which case we want the
3052 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3053 inserted there.
3055 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3056 this block, because if it did, it would have existed in our dominator's
3057 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3060 bitmap_insert_into_set (PHI_GEN (block), newphi);
3061 bitmap_value_replace_in_set (AVAIL_OUT (block),
3062 newphi);
3063 bitmap_insert_into_set (NEW_SETS (block),
3064 newphi);
3066 /* If we insert a PHI node for a conversion of another PHI node
3067 in the same basic-block try to preserve range information.
3068 This is important so that followup loop passes receive optimal
3069 number of iteration analysis results. See PR61743. */
3070 if (expr->kind == NARY
3071 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3072 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3073 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3074 && INTEGRAL_TYPE_P (type)
3075 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3076 && (TYPE_PRECISION (type)
3077 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3078 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3080 wide_int min, max;
3081 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3082 && !wi::neg_p (min, SIGNED)
3083 && !wi::neg_p (max, SIGNED))
3084 /* Just handle extension and sign-changes of all-positive ranges. */
3085 set_range_info (temp,
3086 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3087 wide_int_storage::from (min, TYPE_PRECISION (type),
3088 TYPE_SIGN (type)),
3089 wide_int_storage::from (max, TYPE_PRECISION (type),
3090 TYPE_SIGN (type)));
3093 if (dump_file && (dump_flags & TDF_DETAILS))
3095 fprintf (dump_file, "Created phi ");
3096 print_gimple_stmt (dump_file, phi, 0);
3097 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3099 pre_stats.phis++;
3100 return true;
3105 /* Perform insertion of partially redundant or hoistable values.
3106 For BLOCK, do the following:
3107 1. Propagate the NEW_SETS of the dominator into the current block.
3108 If the block has multiple predecessors,
3109 2a. Iterate over the ANTIC expressions for the block to see if
3110 any of them are partially redundant.
3111 2b. If so, insert them into the necessary predecessors to make
3112 the expression fully redundant.
3113 2c. Insert a new PHI merging the values of the predecessors.
3114 2d. Insert the new PHI, and the new expressions, into the
3115 NEW_SETS set.
3116 If the block has multiple successors,
3117 3a. Iterate over the ANTIC values for the block to see if
3118 any of them are good candidates for hoisting.
3119 3b. If so, insert expressions computing the values in BLOCK,
3120 and add the new expressions into the NEW_SETS set.
3121 4. Recursively call ourselves on the dominator children of BLOCK.
3123 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3124 do_pre_regular_insertion and do_pre_partial_partial_insertion.
3125 3a and 3b are done in do_hoist_insertion.
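/* As a small example of step 1 (hypothetical): if the dominator
   inserted pretmp_5 for value V7, propagating its NEW_SETS down makes
   pretmp_5 the AVAIL_OUT leader for V7 in this block as well, so later
   insertions and the final elimination phase can reuse it instead of
   re-inserting the computation.  */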
3128 static bool
3129 do_pre_regular_insertion (basic_block block, basic_block dom)
3131 bool new_stuff = false;
3132 vec<pre_expr> exprs;
3133 pre_expr expr;
3134 auto_vec<pre_expr> avail;
3135 int i;
3137 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3138 avail.safe_grow (EDGE_COUNT (block->preds));
3140 FOR_EACH_VEC_ELT (exprs, i, expr)
3142 if (expr->kind == NARY
3143 || expr->kind == REFERENCE)
3145 unsigned int val;
3146 bool by_some = false;
3147 bool cant_insert = false;
3148 bool all_same = true;
3149 pre_expr first_s = NULL;
3150 edge pred;
3151 basic_block bprime;
3152 pre_expr eprime = NULL;
3153 edge_iterator ei;
3154 pre_expr edoubleprime = NULL;
3155 bool do_insertion = false;
3157 val = get_expr_value_id (expr);
3158 if (bitmap_set_contains_value (PHI_GEN (block), val))
3159 continue;
3160 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3162 if (dump_file && (dump_flags & TDF_DETAILS))
3164 fprintf (dump_file, "Found fully redundant value: ");
3165 print_pre_expr (dump_file, expr);
3166 fprintf (dump_file, "\n");
3168 continue;
3171 FOR_EACH_EDGE (pred, ei, block->preds)
3173 unsigned int vprime;
3175 /* We should never run insertion for the exit block
3176 and so not come across fake pred edges. */
3177 gcc_assert (!(pred->flags & EDGE_FAKE));
3178 bprime = pred->src;
3179 /* We are looking at ANTIC_OUT of bprime. */
3180 eprime = phi_translate (expr, ANTIC_IN (block), NULL, pred);
3182 /* eprime will generally only be NULL if the
3183 value of the expression, translated
3184 through the PHI for this predecessor, is
3185 undefined. If that is the case, we can't
3186 make the expression fully redundant,
3187 because its value is undefined along a
3188 predecessor path. We can thus break out
3189 early because it doesn't matter what the
3190 rest of the results are. */
3191 if (eprime == NULL)
3193 avail[pred->dest_idx] = NULL;
3194 cant_insert = true;
3195 break;
3198 vprime = get_expr_value_id (eprime);
3199 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3200 vprime);
3201 if (edoubleprime == NULL)
3203 avail[pred->dest_idx] = eprime;
3204 all_same = false;
3206 else
3208 avail[pred->dest_idx] = edoubleprime;
3209 by_some = true;
3210 /* We want to perform insertions to remove a redundancy on
3211 a path in the CFG we want to optimize for speed. */
3212 if (optimize_edge_for_speed_p (pred))
3213 do_insertion = true;
3214 if (first_s == NULL)
3215 first_s = edoubleprime;
3216 else if (!pre_expr_d::equal (first_s, edoubleprime))
3217 all_same = false;
3220 /* If we can insert it, it's not the same value
3221 already existing along every predecessor, and
3222 it's defined by some predecessor, then it is
3223 partially redundant. */
3224 if (!cant_insert && !all_same && by_some)
3226 if (!do_insertion)
3228 if (dump_file && (dump_flags & TDF_DETAILS))
3230 fprintf (dump_file, "Skipping partial redundancy for "
3231 "expression ");
3232 print_pre_expr (dump_file, expr);
3233 fprintf (dump_file, " (%04d), no redundancy on to be "
3234 "optimized for speed edge\n", val);
3237 else if (dbg_cnt (treepre_insert))
3239 if (dump_file && (dump_flags & TDF_DETAILS))
3241 fprintf (dump_file, "Found partial redundancy for "
3242 "expression ");
3243 print_pre_expr (dump_file, expr);
3244 fprintf (dump_file, " (%04d)\n",
3245 get_expr_value_id (expr));
3247 if (insert_into_preds_of_block (block,
3248 get_expression_id (expr),
3249 avail))
3250 new_stuff = true;
3253 /* If all edges produce the same value and that value is
3254 an invariant, then the PHI has the same value on all
3255 edges. Note this. */
3256 else if (!cant_insert && all_same)
3258 gcc_assert (edoubleprime->kind == CONSTANT
3259 || edoubleprime->kind == NAME);
3261 tree temp = make_temp_ssa_name (get_expr_type (expr),
3262 NULL, "pretmp");
3263 gassign *assign
3264 = gimple_build_assign (temp,
3265 edoubleprime->kind == CONSTANT ?
3266 PRE_EXPR_CONSTANT (edoubleprime) :
3267 PRE_EXPR_NAME (edoubleprime));
3268 gimple_stmt_iterator gsi = gsi_after_labels (block);
3269 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3271 VN_INFO_GET (temp)->value_id = val;
3272 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3273 if (VN_INFO (temp)->valnum == NULL_TREE)
3274 VN_INFO (temp)->valnum = temp;
3275 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3276 pre_expr newe = get_or_alloc_expr_for_name (temp);
3277 add_to_value (val, newe);
3278 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3279 bitmap_insert_into_set (NEW_SETS (block), newe);
3284 exprs.release ();
3285 return new_stuff;
3289 /* Perform insertion for partially anticipatable expressions. There
3290 is only one case in which we perform insertion for these: when the
3291 expression is partially anticipatable and fully available.
3292 In this case, we know that putting it earlier will enable us to
3293 remove the later computation. */
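/* Hypothetical example: if a_1 + b_2 is only partially anticipated at
   BLOCK (one path beyond BLOCK recomputes it, another does not) but
   its value is already available at the end of every predecessor,
   merging it with a PHI at BLOCK computes nothing new and lets the
   later recomputation be eliminated on the path that has it.  */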
3295 static bool
3296 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3298 bool new_stuff = false;
3299 vec<pre_expr> exprs;
3300 pre_expr expr;
3301 auto_vec<pre_expr> avail;
3302 int i;
3304 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3305 avail.safe_grow (EDGE_COUNT (block->preds));
3307 FOR_EACH_VEC_ELT (exprs, i, expr)
3309 if (expr->kind == NARY
3310 || expr->kind == REFERENCE)
3312 unsigned int val;
3313 bool by_all = true;
3314 bool cant_insert = false;
3315 edge pred;
3316 basic_block bprime;
3317 pre_expr eprime = NULL;
3318 edge_iterator ei;
3320 val = get_expr_value_id (expr);
3321 if (bitmap_set_contains_value (PHI_GEN (block), val))
3322 continue;
3323 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3324 continue;
3326 FOR_EACH_EDGE (pred, ei, block->preds)
3328 unsigned int vprime;
3329 pre_expr edoubleprime;
3331 /* We should never run insertion for the exit block
3332 and so not come across fake pred edges. */
3333 gcc_assert (!(pred->flags & EDGE_FAKE));
3334 bprime = pred->src;
3335 eprime = phi_translate (expr, ANTIC_IN (block),
3336 PA_IN (block), pred);
3338 /* eprime will generally only be NULL if the
3339 value of the expression, translated
3340 through the PHI for this predecessor, is
3341 undefined. If that is the case, we can't
3342 make the expression fully redundant,
3343 because its value is undefined along a
3344 predecessor path. We can thus break out
3345 early because it doesn't matter what the
3346 rest of the results are. */
3347 if (eprime == NULL)
3349 avail[pred->dest_idx] = NULL;
3350 cant_insert = true;
3351 break;
3354 vprime = get_expr_value_id (eprime);
3355 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3356 avail[pred->dest_idx] = edoubleprime;
3357 if (edoubleprime == NULL)
3359 by_all = false;
3360 break;
3364 /* If we can insert it and its value is available
3365 along every predecessor, the expression is both
3366 partially anticipatable and fully available, so
3367 inserting it here removes the later computation. */
3368 if (!cant_insert && by_all)
3370 edge succ;
3371 bool do_insertion = false;
3373 /* Insert only if we can remove a later expression on a path
3374 that we want to optimize for speed.
3375 The phi node that we will be inserting in BLOCK is not free,
3376 and inserting it for the sake of a !optimize_for_speed successor
3377 may cause regressions on the speed path. */
3378 FOR_EACH_EDGE (succ, ei, block->succs)
3380 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3381 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3383 if (optimize_edge_for_speed_p (succ))
3384 do_insertion = true;
3388 if (!do_insertion)
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 fprintf (dump_file, "Skipping partial partial redundancy "
3393 "for expression ");
3394 print_pre_expr (dump_file, expr);
3395 fprintf (dump_file, " (%04d), not (partially) anticipated "
3396 "on any to be optimized for speed edges\n", val);
3399 else if (dbg_cnt (treepre_insert))
3401 pre_stats.pa_insert++;
3402 if (dump_file && (dump_flags & TDF_DETAILS))
3404 fprintf (dump_file, "Found partial partial redundancy "
3405 "for expression ");
3406 print_pre_expr (dump_file, expr);
3407 fprintf (dump_file, " (%04d)\n",
3408 get_expr_value_id (expr));
3410 if (insert_into_preds_of_block (block,
3411 get_expression_id (expr),
3412 avail))
3413 new_stuff = true;
3419 exprs.release ();
3420 return new_stuff;
3423 /* Insert expressions in BLOCK to compute hoistable values up.
3424 Return TRUE if something was inserted, otherwise return FALSE.
3425 The caller has to make sure that BLOCK has at least two successors. */
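/* Hypothetical example: if both arms of a two-way branch ending BLOCK
   compute a_1 + b_2 -- so its value is in ANTIC_IN (BLOCK) and in
   AVAIL_OUT of each successor -- but BLOCK itself does not, inserting
   the computation at the end of BLOCK makes both copies in the arms
   fully redundant and removable.  */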
3427 static bool
3428 do_hoist_insertion (basic_block block)
3430 edge e;
3431 edge_iterator ei;
3432 bool new_stuff = false;
3433 unsigned i;
3434 gimple_stmt_iterator last;
3436 /* At least two successors, or else... */
3437 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3439 /* Check that all successors of BLOCK are dominated by block.
3440 We could use dominated_by_p() for this, but actually there is a much
3441 quicker check: any successor that is dominated by BLOCK can't have
3442 more than one predecessor edge. */
3443 FOR_EACH_EDGE (e, ei, block->succs)
3444 if (! single_pred_p (e->dest))
3445 return false;
3447 /* Determine the insertion point. If we would have to insert before
3448 the last stmt and cannot do so safely, bail out. */
3449 last = gsi_last_bb (block);
3450 if (!gsi_end_p (last)
3451 && !is_ctrl_stmt (gsi_stmt (last))
3452 && stmt_ends_bb_p (gsi_stmt (last)))
3453 return false;
3455 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3456 hoistable values. */
3457 bitmap_set hoistable_set;
3459 /* A hoistable value must be in ANTIC_IN(block)
3460 but not in AVAIL_OUT(BLOCK). */
3461 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3462 bitmap_and_compl (&hoistable_set.values,
3463 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3465 /* Short-cut for a common case: hoistable_set is empty. */
3466 if (bitmap_empty_p (&hoistable_set.values))
3467 return false;
3469 /* Compute which of the hoistable values is in AVAIL_OUT of
3470 at least one of the successors of BLOCK. */
3471 bitmap_head availout_in_some;
3472 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3473 FOR_EACH_EDGE (e, ei, block->succs)
3474 /* Do not consider expressions solely because of their availability
3475 on loop exits. They'd be ANTIC-IN throughout the whole loop
3476 and thus effectively hoisted across loops by the combination of
3477 PRE and hoisting. */
3478 if (! loop_exit_edge_p (block->loop_father, e))
3479 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3480 &AVAIL_OUT (e->dest)->values);
3481 bitmap_clear (&hoistable_set.values);
3483 /* Short-cut for a common case: availout_in_some is empty. */
3484 if (bitmap_empty_p (&availout_in_some))
3485 return false;
3487 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3488 hoistable_set.values = availout_in_some;
3489 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3491 /* Now finally construct the topological-ordered expression set. */
3492 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3494 bitmap_clear (&hoistable_set.values);
3496 /* If there are candidate values for hoisting, insert expressions
3497 strategically to make the hoistable expressions fully redundant. */
3498 pre_expr expr;
3499 FOR_EACH_VEC_ELT (exprs, i, expr)
3501 /* While we try to sort expressions topologically above, the
3502 sorting doesn't work out perfectly. Catch expressions we
3503 already inserted. */
3504 unsigned int value_id = get_expr_value_id (expr);
3505 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3507 if (dump_file && (dump_flags & TDF_DETAILS))
3509 fprintf (dump_file,
3510 "Already inserted expression for ");
3511 print_pre_expr (dump_file, expr);
3512 fprintf (dump_file, " (%04d)\n", value_id);
3514 continue;
3517 /* OK, we should hoist this value. Perform the transformation. */
3518 pre_stats.hoist_insert++;
3519 if (dump_file && (dump_flags & TDF_DETAILS))
3521 fprintf (dump_file,
3522 "Inserting expression in block %d for code hoisting: ",
3523 block->index);
3524 print_pre_expr (dump_file, expr);
3525 fprintf (dump_file, " (%04d)\n", value_id);
3528 gimple_seq stmts = NULL;
3529 tree res = create_expression_by_pieces (block, expr, &stmts,
3530 get_expr_type (expr));
3532 /* Do not return true if expression creation ultimately
3533 did not insert any statements. */
3534 if (gimple_seq_empty_p (stmts))
3535 res = NULL_TREE;
3536 else
3538 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3539 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3540 else
3541 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3544 /* Make sure to not return true if expression creation ultimately
3545 failed but also make sure to insert any stmts produced as they
3546 are tracked in inserted_exprs. */
3547 if (! res)
3548 continue;
3550 new_stuff = true;
3553 exprs.release ();
3555 return new_stuff;
3558 /* Do a dominator walk on the control flow graph, and insert computations
3559 of values as necessary for PRE and hoisting. */
3561 static bool
3562 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3564 basic_block son;
3565 bool new_stuff = false;
3567 if (block)
3569 basic_block dom;
3570 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3571 if (dom)
3573 unsigned i;
3574 bitmap_iterator bi;
3575 bitmap_set_t newset;
3577 /* First, update the AVAIL_OUT set with anything we may have
3578 inserted higher up in the dominator tree. */
3579 newset = NEW_SETS (dom);
3580 if (newset)
3582 /* Note that we need to value_replace both NEW_SETS and
3583 AVAIL_OUT. In both sets the value may currently be
3584 represented by some non-simple expression that we want
3585 to replace with the new name. */
3586 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3588 pre_expr expr = expression_for_id (i);
3589 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3590 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3594 /* Insert expressions for partial redundancies. */
3595 if (do_pre && !single_pred_p (block))
3597 new_stuff |= do_pre_regular_insertion (block, dom);
3598 if (do_partial_partial)
3599 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3602 /* Insert expressions for hoisting. */
3603 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3604 new_stuff |= do_hoist_insertion (block);
3607 for (son = first_dom_son (CDI_DOMINATORS, block);
3608 son;
3609 son = next_dom_son (CDI_DOMINATORS, son))
3611 new_stuff |= insert_aux (son, do_pre, do_hoist);
3614 return new_stuff;
3617 /* Perform insertion of partially redundant and hoistable values. */
3619 static void
3620 insert (void)
3622 bool new_stuff = true;
3623 basic_block bb;
3624 int num_iterations = 0;
3626 FOR_ALL_BB_FN (bb, cfun)
3627 NEW_SETS (bb) = bitmap_set_new ();
3629 while (new_stuff)
3631 num_iterations++;
3632 if (dump_file && (dump_flags & TDF_DETAILS))
3633 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3634 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3635 flag_code_hoisting);
3637 /* Clear the NEW sets before the next iteration. We have already
3638 fully propagated its contents. */
3639 if (new_stuff)
3640 FOR_ALL_BB_FN (bb, cfun)
3641 bitmap_set_free (NEW_SETS (bb));
3643 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3647 /* Compute the AVAIL set for all basic blocks.
3649 This function performs value numbering of the statements in each basic
3650 block. The AVAIL sets are built from information we glean while doing
3651 this value numbering, since the AVAIL sets contain only one entry per
3652 value.
3654 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3655 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
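/* Worked instance of the equations above (hypothetical names): if DOM
   computes t_1 = a_0 + b_0 and BLOCK, immediately dominated by DOM,
   has the PHI result p_2 and computes t_3 = p_2 * t_1, then
   AVAIL_OUT (BLOCK) = AVAIL_OUT (DOM) U {p_2} U {t_3}, with one leader
   kept per value as the sets flow down the dominator tree.  */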
3657 static void
3658 compute_avail (void)
3661 basic_block block, son;
3662 basic_block *worklist;
3663 size_t sp = 0;
3664 unsigned i;
3665 tree name;
3667 /* We pretend that default definitions are defined in the entry block.
3668 This includes function arguments and the static chain decl. */
3669 FOR_EACH_SSA_NAME (i, name, cfun)
3671 pre_expr e;
3672 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3673 || has_zero_uses (name)
3674 || virtual_operand_p (name))
3675 continue;
3677 e = get_or_alloc_expr_for_name (name);
3678 add_to_value (get_expr_value_id (e), e);
3679 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3680 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3684 if (dump_file && (dump_flags & TDF_DETAILS))
3686 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3687 "tmp_gen", ENTRY_BLOCK);
3688 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3689 "avail_out", ENTRY_BLOCK);
3692 /* Allocate the worklist. */
3693 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3695 /* Seed the algorithm by putting the dominator children of the entry
3696 block on the worklist. */
3697 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3698 son;
3699 son = next_dom_son (CDI_DOMINATORS, son))
3700 worklist[sp++] = son;
3702 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3703 = ssa_default_def (cfun, gimple_vop (cfun));
3705 /* Loop until the worklist is empty. */
3706 while (sp)
3708 gimple *stmt;
3709 basic_block dom;
3711 /* Pick a block from the worklist. */
3712 block = worklist[--sp];
3714 /* Initially, the set of available values in BLOCK is that of
3715 its immediate dominator. */
3716 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3717 if (dom)
3719 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3720 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3723 /* Generate values for PHI nodes. */
3724 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3725 gsi_next (&gsi))
3727 tree result = gimple_phi_result (gsi.phi ());
3729 /* We have no need for virtual phis, as they don't represent
3730 actual computations. */
3731 if (virtual_operand_p (result))
3733 BB_LIVE_VOP_ON_EXIT (block) = result;
3734 continue;
3737 pre_expr e = get_or_alloc_expr_for_name (result);
3738 add_to_value (get_expr_value_id (e), e);
3739 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3740 bitmap_insert_into_set (PHI_GEN (block), e);
3743 BB_MAY_NOTRETURN (block) = 0;
3745 /* Now compute value numbers and populate value sets with all
3746 the expressions computed in BLOCK. */
3747 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3748 gsi_next (&gsi))
3750 ssa_op_iter iter;
3751 tree op;
3753 stmt = gsi_stmt (gsi);
3755 /* Cache whether the basic-block has any non-visible side-effect
3756 or control flow.
3757 If this isn't a call or it is the last stmt in the
3758 basic-block then the CFG represents things correctly. */
3759 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3761 /* Non-looping const functions always return normally.
3762 Otherwise the call might not return or might have side-effects
3763 that forbid hoisting possibly trapping expressions
3764 before it. */
3765 int flags = gimple_call_flags (stmt);
3766 if (!(flags & ECF_CONST)
3767 || (flags & ECF_LOOPING_CONST_OR_PURE))
3768 BB_MAY_NOTRETURN (block) = 1;
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_vdef (stmt))
            BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

          if (gimple_has_side_effects (stmt)
              || stmt_could_throw_p (stmt)
              || is_gimple_debug (stmt))
            continue;
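
          /* Every SSA name used by the statement occurs in this block
             and so belongs in EXP_GEN; uses whose value is still
             undefined carry no computation and are skipped.  */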
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            {
              if (ssa_undefined_value_p (op))
                continue;
              pre_expr e = get_or_alloc_expr_for_name (op);
              bitmap_value_insert_into_set (EXP_GEN (block), e);
            }

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              continue;
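
            /* Calls are entered into EXP_GEN via their vn_reference so
               that a later identical call can be proven redundant with
               this one.  */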

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                vn_reference_s ref1;
                pre_expr result = NULL;

                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;

                vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
                if (!ref)
                  continue;

                /* If the value of the call is not invalidated in
                   this block until it is computed, add the expression
                   to EXP_GEN.  */
                if (!gimple_vuse (stmt)
                    || gimple_code
                         (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                    || gimple_bb (SSA_NAME_DEF_STMT
                                    (gimple_vuse (stmt))) != block)
                  {
                    result = pre_expr_pool.allocate ();
                    result->kind = REFERENCE;
                    result->id = 0;
                    PRE_EXPR_REFERENCE (result) = ref;

                    get_or_alloc_expression_id (result);
                    add_to_value (get_expr_value_id (result), result);
                    bitmap_value_insert_into_set (EXP_GEN (block), result);
                  }
                continue;
              }

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (vn_get_stmt_kind (stmt))
                  {
                  case VN_NARY:
                    {
                      enum tree_code code = gimple_assign_rhs_code (stmt);
                      vn_nary_op_t nary;

                      /* COND_EXPR and VEC_COND_EXPR are awkward in
                         that they contain an embedded complex expression.
                         Don't even try to shove those through PRE.  */
                      if (code == COND_EXPR
                          || code == VEC_COND_EXPR)
                        continue;

                      vn_nary_op_lookup_stmt (stmt, &nary);
                      if (!nary)
                        continue;

                      /* If the NARY traps and there was a preceding
                         point in the block that might not return, avoid
                         adding the nary to EXP_GEN.  */
                      if (BB_MAY_NOTRETURN (block)
                          && vn_nary_may_trap (nary))
                        continue;

                      result = pre_expr_pool.allocate ();
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }
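
                  /* Loads depend on the memory state, so they are looked
                     up through the reference machinery and only enter
                     EXP_GEN if nothing in the block clobbers the
                     location before the load.  */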
                  case VN_REFERENCE:
                    {
                      tree rhs1 = gimple_assign_rhs1 (stmt);
                      alias_set_type set = get_alias_set (rhs1);
                      vec<vn_reference_op_s> operands
                        = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
                      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                                  TREE_TYPE (rhs1),
                                                  operands, &ref, VN_WALK);
                      if (!ref)
                        {
                          operands.release ();
                          continue;
                        }

                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
                         to EXP_GEN.  */
                      if (gimple_vuse (stmt))
                        {
                          gimple *def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            {
                              operands.release ();
                              continue;
                            }
                        }

                      /* If the load was value-numbered to another
                         load, make sure we do not use its expression
                         for insertion if it wouldn't be a valid
                         replacement.  */
                      /* At the moment we have a testcase
                         for hoist insertion of aligned vs. misaligned
                         variants in gcc.dg/torture/pr65270-1.c, thus
                         with just alignment to be considered we can
                         simply replace the expression in the hashtable
                         with the most conservative one.  */
                      vn_reference_op_t ref1 = &ref->operands.last ();
                      while (ref1->opcode != TARGET_MEM_REF
                             && ref1->opcode != MEM_REF
                             && ref1 != &ref->operands[0])
                        --ref1;
                      vn_reference_op_t ref2 = &operands.last ();
                      while (ref2->opcode != TARGET_MEM_REF
                             && ref2->opcode != MEM_REF
                             && ref2 != &operands[0])
                        --ref2;
                      if ((ref1->opcode == TARGET_MEM_REF
                           || ref1->opcode == MEM_REF)
                          && (TYPE_ALIGN (ref1->type)
                              > TYPE_ALIGN (ref2->type)))
                        ref1->type
                          = build_aligned_type (ref1->type,
                                                TYPE_ALIGN (ref2->type));
                      /* TBAA behavior is an obvious part so make sure
                         that the hashtable one covers this as well
                         by adjusting the ref alias set and its base.  */
                      if (ref->set == set
                          || alias_set_subset_of (set, ref->set))
                        ;
                      else if (alias_set_subset_of (ref->set, set))
                        {
                          ref->set = set;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (TREE_TYPE (ref2->op0),
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (TREE_TYPE (ref2->op2),
                                                  wi::to_wide (ref1->op2));
                        }
                      else
                        {
                          ref->set = 0;
                          if (ref1->opcode == MEM_REF)
                            ref1->op0
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op0));
                          else
                            ref1->op2
                              = wide_int_to_tree (ptr_type_node,
                                                  wi::to_wide (ref1->op2));
                        }
                      operands.release ();

                      result = pre_expr_pool.allocate ();
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}
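
/* For example, in a diamond CFG

	  BB1  (x_1 = a_1 + b_1)
	 /   \
       BB2   BB3
	 \   /
	  BB4

   BB1 immediately dominates BB2, BB3 and BB4, so the AVAIL_OUT sets of
   all three start as copies of AVAIL_OUT (BB1) and hence contain x_1.  */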

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();
  scev_initialize ();

  run_scc_vn (VN_WALK);

  init_pre ();
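
  /* The GVN-PRE pipeline proper: compute the local and AVAIL sets,
     solve the ANTIC dataflow problem, then insert expressions to turn
     partial redundancies into full ones.  */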
  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC either, even though it's plenty fast; nor do we
     require AVAIL.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_avail ();
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not)
     end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (inserted_exprs);

  /* Because we don't follow the standard PRE algorithm exactly, and
     sometimes decide not to insert PHI nodes, and because value numbering
     of casts isn't perfect, we sometimes end up inserting dead code.  This
     simple DCE-like pass removes any insertions we made that weren't
     actually used.  */
  simple_dce_from_worklist (inserted_exprs);

  fini_pre ();

  scev_finalize ();
  loop_optimizer_finalize ();

  /* Restore SSA info before tail-merging, as that resets it as well.  */
  scc_vn_restore_ssa_info ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in
     which case we can merge the block with its single remaining
     predecessor.  It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web; together with the
     cfg-cleanup opportunities exposed by PRE, this would wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}