/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
	This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
	This adds computation of AVAIL_OUT and ANTIC_IN and
	does expression insertion to form GVN-PRE.

     3. Code hoisting
	This optimization uses the ANTIC_IN sets computed for PRE
	to move expressions further up than PRE would do, to make
	multiple computations of the same value fully redundant.
	This pass is explained below (after the explanation of the
	basic algorithm for PRE).  */
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining quadraticness in
      memory of GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   performs these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
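
/* An illustrative sketch of the above (hypothetical GIMPLE-like
   pseudocode, not taken from the original sources):

	if (cond_1)
	  x_1 = a_2 + b_3;	// a_2 + b_3 available on this path only
	y_4 = a_2 + b_3;	// partially redundant

   The expression is ANTIC in both predecessors but AVAIL in only one,
   so insertion places it on the path where it was missing and a PHI
   merges the results:

	if (cond_1)
	  x_1 = a_2 + b_3;
	else
	  t_5 = a_2 + b_3;	// inserted
	t_6 = PHI <x_1, t_5>;
	y_4 = t_6;		// now fully redundant, eliminated later  */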
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it also is helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

   1. The value is in ANTIC_IN(B) -- the value will be computed on all
      paths from B to function exit and it can be computed in B;

   2. The value is not in AVAIL_OUT(B) -- there would be no need to
      compute the value again and make it available twice;

   3. All successors of B are dominated by B -- makes sure that inserting
      a computation of the value in B will make the remaining
      computations fully redundant;

   4. At least one successor has the value in AVAIL_OUT -- to avoid
      hoisting values up too far;

   5. There are at least two successors of B -- hoisting in straight
      line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shaped regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefits for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */
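
/* An illustrative sketch of a qualifying diamond (hypothetical GIMPLE,
   not taken from the original sources): both arms compute a_1 + b_2,
   so the value is in ANTIC_IN of the branch block, absent from its
   AVAIL_OUT, the block dominates both successors, and after PRE the
   value is in at least one successor's AVAIL_OUT:

	// hoist t_5 = a_1 + b_2 to here
	if (cond_3)
	  x_4 = a_1 + b_2;
	else
	  y_6 = a_1 + b_2;

   Inserting the computation before the branch makes both arm
   computations fully redundant; elimination then replaces them.  */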
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
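
/* A hypothetical illustration (names and numbers invented, not from
   the original sources): after value numbering

	b_2 = a_1;  c_3 = a_1 + 1;  d_4 = b_2 + 1;

   the names c_3 and d_4 share one value number.  That value gets a
   small value_id, say 7, which serves as the bit index in the
   dataflow bitmaps, while c_3 acts as the representative SSA_NAME.
   A fake SSA_NAME is created only if no real name carries the
   value, e.g. for a value first discovered by phi translation.  */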
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
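
/* A small hypothetical example (not from the original sources): a set
   holding the expressions {a_1 + b_2, c_3} whose value-ids are 7 and 4
   is stored as the expression-id bitmap of those two expressions plus
   the value-id bitmap {4, 7}.  Value membership is then a single bit
   test, and walking the value bitmap in increasing value-id order
   yields the topological order when it is needed.  */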
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the expression id for EXPR, or 0 if we have not assigned one.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_expr_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  bitmap_clear_bit (&set->values, val);
  bitmap_clear_bit (&set->expressions, get_expression_id (expr));
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
	 for the same value to appear in a set.  This is needed for
	 TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array from bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}
/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  pre_expr to_remove = NULL;
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove)
	{
	  bitmap_remove_expr_from_set (a, to_remove);
	  to_remove = NULL;
	}
      pre_expr expr = expression_for_id (i);
      if (bitmap_bit_p (&b->values, get_expr_value_id (expr)))
	to_remove = expr;
    }
  if (to_remove)
    bitmap_remove_expr_from_set (a, to_remove);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant loss for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      return;
	    }
	}
      gcc_unreachable ();
    }
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (res);
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
		     bitmap_set_t set3 = NULL)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}
/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (e))->valnum;
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return VN_INFO (PRE_EXPR_NAME (rep))->valnum;
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  newnary->op[i] = get_representative_for (result);
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       to be inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (constant);
			constant = find_leader_in_sets (value_id, set1, set2,
							AVAIL_OUT (pred));
			if (constant)
			  return constant;
		      }
		  }
		else
		  return constant;
	      }

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
		/* When we end up re-using a value number make sure that
		   doesn't have unrelated (which we can't check here)
		   range or points-to info on it.  */
		if (result
		    && INTEGRAL_TYPE_P (TREE_TYPE (result))
		    && SSA_NAME_RANGE_INFO (result)
		    && ! SSA_NAME_IS_DEFAULT_DEF (result))
		  {
		    if (! VN_INFO (result)->info.range_info)
		      {
			VN_INFO (result)->info.range_info
			  = SSA_NAME_RANGE_INFO (result);
			VN_INFO (result)->range_info_anti_range_p
			  = SSA_NAME_ANTI_RANGE_P (result);
		      }
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "clearing range info of ");
			print_generic_expr (dump_file, result);
			fprintf (dump_file, "\n");
		      }
		    SSA_NAME_RANGE_INFO (result) = NULL;
		  }
		else if (result
			 && POINTER_TYPE_P (TREE_TYPE (result))
			 && SSA_NAME_PTR_INFO (result)
			 && ! SSA_NAME_IS_DEFAULT_DEF (result))
		  {
		    if (! VN_INFO (result)->info.ptr_info)
		      VN_INFO (result)->info.ptr_info
			= SSA_NAME_PTR_INFO (result);
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "clearing points-to info of ");
			print_generic_expr (dump_file, result);
			fprintf (dump_file, "\n");
		      }
		    SSA_NAME_PTR_INFO (result) = NULL;
		  }
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  break;
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      new_val_id = newref->value_id;
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
	      }
	    PRE_EXPR_REFERENCE (expr) = newref;
	    get_or_alloc_expression_id (expr);
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PREDs AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
	 NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
   This means expressions that are made up of values we have no leaders for
   in SET1 or SET2.  */

static void
clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	bitmap_remove_expr_from_set (set1, expr);
    }
  exprs.release ();
}
1947 /* Clean the set of expressions that are no longer valid in SET because
1948 they are clobbered in BLOCK or because they trap and may not be executed. */
1950 static void
1951 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1953 bitmap_iterator bi;
1954 unsigned i;
1955 pre_expr to_remove = NULL;
1957 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1959 /* Remove queued expr. */
1960 if (to_remove)
1962 bitmap_remove_expr_from_set (set, to_remove);
1963 to_remove = NULL;
1966 pre_expr expr = expression_for_id (i);
1967 if (expr->kind == REFERENCE)
1969 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1970 if (ref->vuse)
1972 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1973 if (!gimple_nop_p (def_stmt)
1974 && ((gimple_bb (def_stmt) != block
1975 && !dominated_by_p (CDI_DOMINATORS,
1976 block, gimple_bb (def_stmt)))
1977 || (gimple_bb (def_stmt) == block
1978 && value_dies_in_block_x (expr, block))))
1979 to_remove = expr;
1982 else if (expr->kind == NARY)
1984 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1985 /* If the NARY may trap make sure the block does not contain
1986 a possible exit point.
1987 ??? This is overly conservative if we translate AVAIL_OUT
1988 as the available expression might be after the exit point. */
1989 if (BB_MAY_NOTRETURN (block)
1990 && vn_nary_may_trap (nary))
1991 to_remove = expr;
1995 /* Remove queued expr. */
1996 if (to_remove)
1997 bitmap_remove_expr_from_set (set, to_remove);
2000 static sbitmap has_abnormal_preds;
2002 /* Compute the ANTIC set for BLOCK.
2004 If succs(BLOCK) > 1 then
2005 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2006 else if succs(BLOCK) == 1 then
2007 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2009 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2011 Note that clean() is deferred until after the iteration. */
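
/* A worked example (editorial illustration).  For the diamond

	  B1
	 /  \
	B2    B3
	 \  /
	  B4:  x = a + b;

   a + b is in EXP_GEN (B4) and thus in ANTIC_IN (B4).  B2 and B3 each
   have the single successor B4, so ANTIC_OUT (B2) and ANTIC_OUT (B3)
   are the phi-translation of ANTIC_IN (B4) and a + b becomes ANTIC_IN
   in both.  At B1 the intersection over the two successors still
   contains a + b, so the expression is anticipatable all the way up to
   B1, where insertion may later place a computation of it.  */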
2013 static bool
2014 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2016 bitmap_set_t S, old, ANTIC_OUT;
2017 bitmap_iterator bi;
2018 unsigned int bii;
2019 edge e;
2020 edge_iterator ei;
2022 bool changed = ! BB_VISITED (block);
2023 BB_VISITED (block) = 1;
2024 old = ANTIC_OUT = S = NULL;
2026 /* If any edges from predecessors are abnormal, antic_in is empty,
2027 so do nothing. */
2028 if (block_has_abnormal_pred_edge)
2029 goto maybe_dump_sets;
2031 old = ANTIC_IN (block);
2032 ANTIC_OUT = bitmap_set_new ();
2034 /* If the block has no successors, ANTIC_OUT is empty. */
2035 if (EDGE_COUNT (block->succs) == 0)
2037 /* If we have one successor, we could have some phi nodes to
2038 translate through. */
2039 else if (single_succ_p (block))
2041 basic_block succ_bb = single_succ (block);
2042 gcc_assert (BB_VISITED (succ_bb));
2043 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2045 /* If we have multiple successors, we take the intersection of all of
2046 them. Note that in the case of loop exit phi nodes, we may have
2047 phis to translate through. */
2048 else
2050 size_t i;
2051 basic_block bprime, first = NULL;
2053 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2054 FOR_EACH_EDGE (e, ei, block->succs)
2056 if (!first
2057 && BB_VISITED (e->dest))
2058 first = e->dest;
2059 else if (BB_VISITED (e->dest))
2060 worklist.quick_push (e->dest);
2061 else
2063 /* Unvisited successors get their ANTIC_IN replaced by the
2064 maximal set to arrive at a maximum ANTIC_IN solution.
2065 We can ignore them in the intersection operation and thus
2066 need not explicitly represent that maximum solution. */
2067 if (dump_file && (dump_flags & TDF_DETAILS))
2068 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2069 e->src->index, e->dest->index);
2073 /* Of the multiple successors we must have visited one already,
2074 which is guaranteed by the iteration order. */
2075 gcc_assert (first != NULL);
2077 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2079 /* If we have multiple successors we need to intersect the ANTIC_OUT
2080 sets. For values that's a simple intersection but for
2081 expressions it is a union. Given we want to have a single
2082 expression per value in our sets we have to canonicalize.
2083 Avoid randomness and running into cycles like for PR82129 and
2084 canonicalize the expression we choose to the one with the
2085 lowest id. This requires we actually compute the union first. */
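/* For example (editorial note): if one successor contributes
   expression e1 and another contributes e2, both with value-id V,
   then after intersecting the value bitmaps and unioning the
   expression bitmaps ANTIC_OUT contains V once but both e1 and e2.
   The pruning loop below keeps only the expression with the lowest
   id for V, giving a deterministic representative.  */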
2086 FOR_EACH_VEC_ELT (worklist, i, bprime)
2088 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2090 bitmap_set_t tmp = bitmap_set_new ();
2091 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2092 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2093 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2094 bitmap_set_free (tmp);
2096 else
2098 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (bprime)->values);
2099 bitmap_ior_into (&ANTIC_OUT->expressions,
2100 &ANTIC_IN (bprime)->expressions);
2103 if (! worklist.is_empty ())
2105 /* Prune expressions not in the value set, canonicalizing to the
2106 expression with the lowest ID. */
2107 bitmap_iterator bi;
2108 unsigned int i;
2109 unsigned int to_clear = -1U;
2110 bitmap seen_value = BITMAP_ALLOC (NULL);
2111 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2113 if (to_clear != -1U)
2115 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2116 to_clear = -1U;
2118 pre_expr expr = expression_for_id (i);
2119 unsigned int value_id = get_expr_value_id (expr);
2120 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)
2121 || !bitmap_set_bit (seen_value, value_id))
2122 to_clear = i;
2124 if (to_clear != -1U)
2125 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2126 BITMAP_FREE (seen_value);
2130 /* Prune expressions that are clobbered in block and thus become
2131 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2132 prune_clobbered_mems (ANTIC_OUT, block);
2134 /* Generate ANTIC_OUT - TMP_GEN. */
2135 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2137 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2138 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2139 TMP_GEN (block));
2141 /* Then union in the ANTIC_OUT - TMP_GEN values,
2142 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2143 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2144 bitmap_value_insert_into_set (ANTIC_IN (block),
2145 expression_for_id (bii));
2147 /* clean (ANTIC_IN (block)) is deferred until after the iteration has
2148 converged because it can cause non-convergence, see for example PR81181. */
2150 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2151 changed = true;
2153 maybe_dump_sets:
2154 if (dump_file && (dump_flags & TDF_DETAILS))
2156 if (ANTIC_OUT)
2157 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2159 if (changed)
2160 fprintf (dump_file, "[changed] ");
2161 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2162 block->index);
2164 if (S)
2165 print_bitmap_set (dump_file, S, "S", block->index);
2167 if (old)
2168 bitmap_set_free (old);
2169 if (S)
2170 bitmap_set_free (S);
2171 if (ANTIC_OUT)
2172 bitmap_set_free (ANTIC_OUT);
2173 return changed;
2176 /* Compute PARTIAL_ANTIC for BLOCK.
2178 If succs(BLOCK) > 1 then
2179 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2180 in ANTIC_OUT for all succ(BLOCK)
2181 else if succs(BLOCK) == 1 then
2182 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2184 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK]) */
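
/* An editorial example: if x + y is anticipatable along successor S1
   (it is in ANTIC_IN (S1)) but not along successor S2, the
   intersection drops it from ANTIC_OUT (BLOCK), yet the union above
   keeps it in PA_OUT (BLOCK).  It thus remains a candidate for
   partial-partial insertion, provided it is also fully available.  */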
2187 static void
2188 compute_partial_antic_aux (basic_block block,
2189 bool block_has_abnormal_pred_edge)
2191 bitmap_set_t old_PA_IN;
2192 bitmap_set_t PA_OUT;
2193 edge e;
2194 edge_iterator ei;
2195 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2197 old_PA_IN = PA_OUT = NULL;
2199 /* If any edges from predecessors are abnormal, antic_in is empty,
2200 so do nothing. */
2201 if (block_has_abnormal_pred_edge)
2202 goto maybe_dump_sets;
2204 /* If there are too many partially anticipatable values in the
2205 block, phi_translate_set can take an exponential time: stop
2206 before the translation starts. */
2207 if (max_pa
2208 && single_succ_p (block)
2209 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2210 goto maybe_dump_sets;
2212 old_PA_IN = PA_IN (block);
2213 PA_OUT = bitmap_set_new ();
2215 /* If the block has no successors, PA_OUT is empty. */
2216 if (EDGE_COUNT (block->succs) == 0)
2218 /* If we have one successor, we could have some phi nodes to
2219 translate through. Note that we can't phi translate across DFS
2220 back edges in partial antic, because it uses a union operation on
2221 the successors. For recurrences like IVs, we will end up
2222 generating a new value in the set on each go around (i + 3 (VH.1),
2223 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2224 else if (single_succ_p (block))
2226 basic_block succ = single_succ (block);
2227 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2228 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2230 /* If we have multiple successors, we take the union of all of
2231 them. */
2232 else
2234 size_t i;
2235 basic_block bprime;
2237 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2238 FOR_EACH_EDGE (e, ei, block->succs)
2240 if (e->flags & EDGE_DFS_BACK)
2241 continue;
2242 worklist.quick_push (e->dest);
2244 if (worklist.length () > 0)
2246 FOR_EACH_VEC_ELT (worklist, i, bprime)
2248 unsigned int i;
2249 bitmap_iterator bi;
2251 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2252 bitmap_value_insert_into_set (PA_OUT,
2253 expression_for_id (i));
2254 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2256 bitmap_set_t pa_in = bitmap_set_new ();
2257 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2258 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2259 bitmap_value_insert_into_set (PA_OUT,
2260 expression_for_id (i));
2261 bitmap_set_free (pa_in);
2263 else
2264 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2265 bitmap_value_insert_into_set (PA_OUT,
2266 expression_for_id (i));
2271 /* Prune expressions that are clobbered in block and thus become
2272 invalid if translated from PA_OUT to PA_IN. */
2273 prune_clobbered_mems (PA_OUT, block);
2275 /* PA_IN starts with PA_OUT - TMP_GEN.
2276 Then we subtract things from ANTIC_IN. */
2277 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2279 /* For partial antic, we want to put back in the phi results, since
2280 we will properly avoid making them partially antic over backedges. */
2281 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2282 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2284 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2285 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2287 clean (PA_IN (block), ANTIC_IN (block));
2289 maybe_dump_sets:
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2292 if (PA_OUT)
2293 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2295 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2297 if (old_PA_IN)
2298 bitmap_set_free (old_PA_IN);
2299 if (PA_OUT)
2300 bitmap_set_free (PA_OUT);
2303 /* Compute ANTIC and partial ANTIC sets. */
2305 static void
2306 compute_antic (void)
2308 bool changed = true;
2309 int num_iterations = 0;
2310 basic_block block;
2311 int i;
2312 edge_iterator ei;
2313 edge e;
2315 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2316 We pre-build the map of blocks with incoming abnormal edges here. */
2317 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2318 bitmap_clear (has_abnormal_preds);
2320 FOR_ALL_BB_FN (block, cfun)
2322 BB_VISITED (block) = 0;
2324 FOR_EACH_EDGE (e, ei, block->preds)
2325 if (e->flags & EDGE_ABNORMAL)
2327 bitmap_set_bit (has_abnormal_preds, block->index);
2328 break;
2331 /* While we are here, give empty ANTIC_IN sets to each block. */
2332 ANTIC_IN (block) = bitmap_set_new ();
2333 if (do_partial_partial)
2334 PA_IN (block) = bitmap_set_new ();
2337 /* At the exit block we anticipate nothing. */
2338 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2340 /* For ANTIC computation we need a postorder that also guarantees that
2341 a block with a single successor is visited after its successor.
2342 RPO on the inverted CFG has this property. */
2343 auto_vec<int, 20> postorder;
2344 inverted_post_order_compute (&postorder);
2346 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2347 bitmap_clear (worklist);
2348 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2349 bitmap_set_bit (worklist, e->src->index);
2350 while (changed)
2352 if (dump_file && (dump_flags & TDF_DETAILS))
2353 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2354 /* ??? We need to clear our PHI translation cache here as the
2355 ANTIC sets shrink and we restrict valid translations to
2356 those having operands with leaders in ANTIC. Same below
2357 for PA ANTIC computation. */
2358 num_iterations++;
2359 changed = false;
2360 for (i = postorder.length () - 1; i >= 0; i--)
2362 if (bitmap_bit_p (worklist, postorder[i]))
2364 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2365 bitmap_clear_bit (worklist, block->index);
2366 if (compute_antic_aux (block,
2367 bitmap_bit_p (has_abnormal_preds,
2368 block->index)))
2370 FOR_EACH_EDGE (e, ei, block->preds)
2371 bitmap_set_bit (worklist, e->src->index);
2372 changed = true;
2376 /* Theoretically possible, but *highly* unlikely. */
2377 gcc_checking_assert (num_iterations < 500);
2380 /* We have to clean after the dataflow problem has converged as cleaning
2381 can cause non-convergence because it is based on expressions
2382 rather than values. */
2383 FOR_EACH_BB_FN (block, cfun)
2384 clean (ANTIC_IN (block));
2386 statistics_histogram_event (cfun, "compute_antic iterations",
2387 num_iterations);
2389 if (do_partial_partial)
2391 /* For partial antic we ignore backedges and thus we do not need
2392 to perform any iteration when we process blocks in postorder. */
2393 int postorder_num
2394 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2395 for (i = postorder_num - 1 ; i >= 0; i--)
2397 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2398 compute_partial_antic_aux (block,
2399 bitmap_bit_p (has_abnormal_preds,
2400 block->index));
2404 sbitmap_free (has_abnormal_preds);
2408 /* Inserted expressions are placed onto this worklist, which is used
2409 for performing quick dead code elimination of insertions we made
2410 that didn't turn out to be necessary. */
2411 static bitmap inserted_exprs;
2413 /* The actual worker for create_component_ref_by_pieces. */
2415 static tree
2416 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2417 unsigned int *operand, gimple_seq *stmts)
2419 vn_reference_op_t currop = &ref->operands[*operand];
2420 tree genop;
2421 ++*operand;
2422 switch (currop->opcode)
2424 case CALL_EXPR:
2425 gcc_unreachable ();
2427 case MEM_REF:
2429 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2430 stmts);
2431 if (!baseop)
2432 return NULL_TREE;
2433 tree offset = currop->op0;
2434 if (TREE_CODE (baseop) == ADDR_EXPR
2435 && handled_component_p (TREE_OPERAND (baseop, 0)))
2437 HOST_WIDE_INT off;
2438 tree base;
2439 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2440 &off);
2441 gcc_assert (base);
2442 offset = int_const_binop (PLUS_EXPR, offset,
2443 build_int_cst (TREE_TYPE (offset),
2444 off));
2445 baseop = build_fold_addr_expr (base);
2447 genop = build2 (MEM_REF, currop->type, baseop, offset);
2448 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2449 MR_DEPENDENCE_BASE (genop) = currop->base;
2450 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2451 return genop;
2454 case TARGET_MEM_REF:
2456 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2457 vn_reference_op_t nextop = &ref->operands[++*operand];
2458 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2459 stmts);
2460 if (!baseop)
2461 return NULL_TREE;
2462 if (currop->op0)
2464 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2465 if (!genop0)
2466 return NULL_TREE;
2468 if (nextop->op0)
2470 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2471 if (!genop1)
2472 return NULL_TREE;
2474 genop = build5 (TARGET_MEM_REF, currop->type,
2475 baseop, currop->op2, genop0, currop->op1, genop1);
2477 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2478 MR_DEPENDENCE_BASE (genop) = currop->base;
2479 return genop;
2482 case ADDR_EXPR:
2483 if (currop->op0)
2485 gcc_assert (is_gimple_min_invariant (currop->op0));
2486 return currop->op0;
2488 /* Fallthrough. */
2489 case REALPART_EXPR:
2490 case IMAGPART_EXPR:
2491 case VIEW_CONVERT_EXPR:
2493 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2494 stmts);
2495 if (!genop0)
2496 return NULL_TREE;
2497 return fold_build1 (currop->opcode, currop->type, genop0);
2500 case WITH_SIZE_EXPR:
2502 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2503 stmts);
2504 if (!genop0)
2505 return NULL_TREE;
2506 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2507 if (!genop1)
2508 return NULL_TREE;
2509 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2512 case BIT_FIELD_REF:
2514 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2515 stmts);
2516 if (!genop0)
2517 return NULL_TREE;
2518 tree op1 = currop->op0;
2519 tree op2 = currop->op1;
2520 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2521 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2522 return fold (t);
2525 /* For array ref vn_reference_op's, operand 1 of the array ref
2526 is op0 of the reference op and operand 3 of the array ref is
2527 op1. */
2528 case ARRAY_RANGE_REF:
2529 case ARRAY_REF:
2531 tree genop0;
2532 tree genop1 = currop->op0;
2533 tree genop2 = currop->op1;
2534 tree genop3 = currop->op2;
2535 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2536 stmts);
2537 if (!genop0)
2538 return NULL_TREE;
2539 genop1 = find_or_generate_expression (block, genop1, stmts);
2540 if (!genop1)
2541 return NULL_TREE;
2542 if (genop2)
2544 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2545 /* Drop zero minimum index if redundant. */
2546 if (integer_zerop (genop2)
2547 && (!domain_type
2548 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2549 genop2 = NULL_TREE;
2550 else
2552 genop2 = find_or_generate_expression (block, genop2, stmts);
2553 if (!genop2)
2554 return NULL_TREE;
2557 if (genop3)
2559 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2560 /* We can't always put a size in units of the element alignment
2561 here as the element alignment may not be visible. See
2562 PR43783. Simply drop the element size for constant
2563 sizes. */
2564 if (TREE_CODE (genop3) == INTEGER_CST
2565 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2566 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2567 (wi::to_offset (genop3)
2568 * vn_ref_op_align_unit (currop))))
2569 genop3 = NULL_TREE;
2570 else
2572 genop3 = find_or_generate_expression (block, genop3, stmts);
2573 if (!genop3)
2574 return NULL_TREE;
2577 return build4 (currop->opcode, currop->type, genop0, genop1,
2578 genop2, genop3);
2580 case COMPONENT_REF:
2582 tree op0;
2583 tree op1;
2584 tree genop2 = currop->op1;
2585 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2586 if (!op0)
2587 return NULL_TREE;
2588 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */
2589 op1 = currop->op0;
2590 if (genop2)
2592 genop2 = find_or_generate_expression (block, genop2, stmts);
2593 if (!genop2)
2594 return NULL_TREE;
2596 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2599 case SSA_NAME:
2601 genop = find_or_generate_expression (block, currop->op0, stmts);
2602 return genop;
2604 case STRING_CST:
2605 case INTEGER_CST:
2606 case COMPLEX_CST:
2607 case VECTOR_CST:
2608 case REAL_CST:
2609 case CONSTRUCTOR:
2610 case VAR_DECL:
2611 case PARM_DECL:
2612 case CONST_DECL:
2613 case RESULT_DECL:
2614 case FUNCTION_DECL:
2615 return currop->op0;
2617 default:
2618 gcc_unreachable ();
2622 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates
2623 for the COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd
2624 end up trying to rename aggregates into SSA form directly, which is not allowed.
2626 Thus, this routine doesn't create temporaries, it just builds a
2627 single access expression for the array, calling
2628 find_or_generate_expression to build the innermost pieces.
2630 This function is a subroutine of create_expression_by_pieces, and
2631 should not be called on its own unless you really know what you
2632 are doing. */
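
/* An editorial sketch of the recursion: for a reference like a.b[i]
   the operand array is processed outside-in.  The ARRAY_REF case
   generates a leader for the index i via find_or_generate_expression,
   recurses for the COMPONENT_REF selecting field b, which recurses
   down to the base VAR_DECL a.  The result is the single tree

     ARRAY_REF <COMPONENT_REF <a, b>, i', ...>

   where i' is the leader generated for i, with no intermediate
   aggregate temporaries.  */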
2634 static tree
2635 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2636 gimple_seq *stmts)
2638 unsigned int op = 0;
2639 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2642 /* Find a simple leader for an expression, or generate one using
2643 create_expression_by_pieces from a NARY expression for the value.
2644 BLOCK is the basic_block we are looking for leaders in.
2645 OP is the tree expression to find a leader for or generate.
2646 Returns the leader or NULL_TREE on failure. */
2648 static tree
2649 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2651 pre_expr expr = get_or_alloc_expr_for (op);
2652 unsigned int lookfor = get_expr_value_id (expr);
2653 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2654 if (leader)
2656 if (leader->kind == NAME)
2657 return PRE_EXPR_NAME (leader);
2658 else if (leader->kind == CONSTANT)
2659 return PRE_EXPR_CONSTANT (leader);
2661 /* Defer. */
2662 return NULL_TREE;
2665 /* It must be a complex expression, so generate it recursively. Note
2666 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2667 where the insert algorithm fails to insert a required expression. */
2668 bitmap exprset = value_expressions[lookfor];
2669 bitmap_iterator bi;
2670 unsigned int i;
2671 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2673 pre_expr temp = expression_for_id (i);
2674 /* We cannot insert random REFERENCE expressions at arbitrary
2675 places. We can insert NARYs, which eventually re-materialize
2676 their operand values. */
2677 if (temp->kind == NARY)
2678 return create_expression_by_pieces (block, temp, stmts,
2679 get_expr_type (expr));
2682 /* Defer. */
2683 return NULL_TREE;
2686 /* Create an expression in pieces, so that we can handle very complex
2687 expressions that may be ANTIC, but not necessarily GIMPLE.
2688 BLOCK is the basic block the expression will be inserted into,
2689 EXPR is the expression to insert (in value form)
2690 STMTS is a statement list to append the necessary insertions into.
2692 This function will die if we hit some value that shouldn't be
2693 ANTIC but is (i.e. there is no leader for it, or its components).
2694 The function returns NULL_TREE in case a different antic expression
2695 has to be inserted first.
2696 This function may also generate expressions that are themselves
2697 partially or fully redundant. Those that are will be either made
2698 fully redundant during the next iteration of insert (for partially
2699 redundant ones), or eliminated by eliminate (for fully redundant
2700 ones). */
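
/* An editorial example: asked to make the value of a + b available in
   BLOCK, this function finds or generates leaders a' and b' for the
   operand values, emits

     pretmp_1 = a' + b';

   onto STMTS, gives pretmp_1 the value-id of the original expression
   and registers it in NEW_SETS and AVAIL_OUT of BLOCK so that later
   insertions can reuse it.  */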
2702 static tree
2703 create_expression_by_pieces (basic_block block, pre_expr expr,
2704 gimple_seq *stmts, tree type)
2706 tree name;
2707 tree folded;
2708 gimple_seq forced_stmts = NULL;
2709 unsigned int value_id;
2710 gimple_stmt_iterator gsi;
2711 tree exprtype = type ? type : get_expr_type (expr);
2712 pre_expr nameexpr;
2713 gassign *newstmt;
2715 switch (expr->kind)
2717 /* We may hit the NAME/CONSTANT case if we have to convert types
2718 that value numbering saw through. */
2719 case NAME:
2720 folded = PRE_EXPR_NAME (expr);
2721 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2722 return folded;
2723 break;
2724 case CONSTANT:
2726 folded = PRE_EXPR_CONSTANT (expr);
2727 tree tem = fold_convert (exprtype, folded);
2728 if (is_gimple_min_invariant (tem))
2729 return tem;
2730 break;
2732 case REFERENCE:
2733 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2735 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2736 unsigned int operand = 1;
2737 vn_reference_op_t currop = &ref->operands[0];
2738 tree sc = NULL_TREE;
2739 tree fn;
2740 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2741 fn = currop->op0;
2742 else
2743 fn = find_or_generate_expression (block, currop->op0, stmts);
2744 if (!fn)
2745 return NULL_TREE;
2746 if (currop->op1)
2748 sc = find_or_generate_expression (block, currop->op1, stmts);
2749 if (!sc)
2750 return NULL_TREE;
2752 auto_vec<tree> args (ref->operands.length () - 1);
2753 while (operand < ref->operands.length ())
2755 tree arg = create_component_ref_by_pieces_1 (block, ref,
2756 &operand, stmts);
2757 if (!arg)
2758 return NULL_TREE;
2759 args.quick_push (arg);
2761 gcall *call
2762 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2763 ? build_fold_addr_expr (fn) : fn), args);
2764 gimple_call_set_with_bounds (call, currop->with_bounds);
2765 if (sc)
2766 gimple_call_set_chain (call, sc);
2767 tree forcedname = make_ssa_name (currop->type);
2768 gimple_call_set_lhs (call, forcedname);
2769 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2770 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2771 folded = forcedname;
2773 else
2775 folded = create_component_ref_by_pieces (block,
2776 PRE_EXPR_REFERENCE (expr),
2777 stmts);
2778 if (!folded)
2779 return NULL_TREE;
2780 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2781 newstmt = gimple_build_assign (name, folded);
2782 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2783 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2784 folded = name;
2786 break;
2787 case NARY:
2789 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2790 tree *genop = XALLOCAVEC (tree, nary->length);
2791 unsigned i;
2792 for (i = 0; i < nary->length; ++i)
2794 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2795 if (!genop[i])
2796 return NULL_TREE;
2797 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2798 may have conversions stripped. */
2799 if (nary->opcode == POINTER_PLUS_EXPR)
2801 if (i == 0)
2802 genop[i] = gimple_convert (&forced_stmts,
2803 nary->type, genop[i]);
2804 else if (i == 1)
2805 genop[i] = gimple_convert (&forced_stmts,
2806 sizetype, genop[i]);
2808 else
2809 genop[i] = gimple_convert (&forced_stmts,
2810 TREE_TYPE (nary->op[i]), genop[i]);
2812 if (nary->opcode == CONSTRUCTOR)
2814 vec<constructor_elt, va_gc> *elts = NULL;
2815 for (i = 0; i < nary->length; ++i)
2816 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2817 folded = build_constructor (nary->type, elts);
2818 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2819 newstmt = gimple_build_assign (name, folded);
2820 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2821 folded = name;
2823 else
2825 switch (nary->length)
2827 case 1:
2828 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2829 genop[0]);
2830 break;
2831 case 2:
2832 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2833 genop[0], genop[1]);
2834 break;
2835 case 3:
2836 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2837 genop[0], genop[1], genop[2]);
2838 break;
2839 default:
2840 gcc_unreachable ();
2844 break;
2845 default:
2846 gcc_unreachable ();
2849 folded = gimple_convert (&forced_stmts, exprtype, folded);
2851 /* If there is nothing to insert, return the simplified result. */
2852 if (gimple_seq_empty_p (forced_stmts))
2853 return folded;
2854 /* If we simplified to a constant, return it and discard any
2855 stmts built along the way. */
2856 if (is_gimple_min_invariant (folded))
2858 gimple_seq_discard (forced_stmts);
2859 return folded;
2861 /* Likewise if we simplified to something not queued for insertion. */
2862 bool found = false;
2863 gsi = gsi_last (forced_stmts);
2864 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2866 gimple *stmt = gsi_stmt (gsi);
2867 tree forcedname = gimple_get_lhs (stmt);
2868 if (forcedname == folded)
2870 found = true;
2871 break;
2874 if (! found)
2876 gimple_seq_discard (forced_stmts);
2877 return folded;
2879 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2881 /* If we have any intermediate expressions, add them to the value
2882 sets and chain them in the instruction stream. */
2883 if (forced_stmts)
2885 gsi = gsi_start (forced_stmts);
2886 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2888 gimple *stmt = gsi_stmt (gsi);
2889 tree forcedname = gimple_get_lhs (stmt);
2890 pre_expr nameexpr;
2892 if (forcedname != folded)
2894 VN_INFO_GET (forcedname)->valnum = forcedname;
2895 VN_INFO (forcedname)->value_id = get_next_value_id ();
2896 nameexpr = get_or_alloc_expr_for_name (forcedname);
2897 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2898 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2899 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2902 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2904 gimple_seq_add_seq (stmts, forced_stmts);
2907 name = folded;
2909 /* Fold the last statement. */
2910 gsi = gsi_last (*stmts);
2911 if (fold_stmt_inplace (&gsi))
2912 update_stmt (gsi_stmt (gsi));
2914 /* Add a value number to the temporary.
2915 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2916 we are creating the expression by pieces, and this particular piece of
2917 the expression may already have been represented. There is no harm in replacing
2918 here. */
2919 value_id = get_expr_value_id (expr);
2920 VN_INFO_GET (name)->value_id = value_id;
2921 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2922 if (VN_INFO (name)->valnum == NULL_TREE)
2923 VN_INFO (name)->valnum = name;
2924 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2925 nameexpr = get_or_alloc_expr_for_name (name);
2926 add_to_value (value_id, nameexpr);
2927 if (NEW_SETS (block))
2928 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2929 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2931 pre_stats.insertions++;
2932 if (dump_file && (dump_flags & TDF_DETAILS))
2934 fprintf (dump_file, "Inserted ");
2935 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2936 fprintf (dump_file, " in predecessor %d (%04d)\n",
2937 block->index, value_id);
2940 return name;
2944 /* Insert the to-be-made-available values of expression EXPRNUM for each
2945 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2946 merge the result with a phi node, given the same value number as
2947 NODE. Return true if we have inserted new stuff. */
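
/* An editorial example of the transformation.  Given

     if (cond_1)
       x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   where a_2 + b_3 is available only on the true edge, the expression
   is inserted on the other predecessor edge and the results merged:

     if (cond_1)
       x_1 = a_2 + b_3;
     else
       pretmp_5 = a_2 + b_3;
     prephitmp_6 = PHI <x_1, pretmp_5>;

   making the original computation of y_4 fully redundant, to be
   cleaned up by elimination.  */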
2949 static bool
2950 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2951 vec<pre_expr> avail)
2953 pre_expr expr = expression_for_id (exprnum);
2954 pre_expr newphi;
2955 unsigned int val = get_expr_value_id (expr);
2956 edge pred;
2957 bool insertions = false;
2958 bool nophi = false;
2959 basic_block bprime;
2960 pre_expr eprime;
2961 edge_iterator ei;
2962 tree type = get_expr_type (expr);
2963 tree temp;
2964 gphi *phi;
2966 /* Make sure we aren't creating an induction variable. */
2967 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2969 bool firstinsideloop = false;
2970 bool secondinsideloop = false;
2971 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2972 EDGE_PRED (block, 0)->src);
2973 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2974 EDGE_PRED (block, 1)->src);
2975 /* Induction variables only have one edge inside the loop. */
2976 if ((firstinsideloop ^ secondinsideloop)
2977 && expr->kind != REFERENCE)
2979 if (dump_file && (dump_flags & TDF_DETAILS))
2980 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2981 nophi = true;
2985 /* Make the necessary insertions. */
2986 FOR_EACH_EDGE (pred, ei, block->preds)
2988 gimple_seq stmts = NULL;
2989 tree builtexpr;
2990 bprime = pred->src;
2991 eprime = avail[pred->dest_idx];
2992 builtexpr = create_expression_by_pieces (bprime, eprime,
2993 &stmts, type);
2994 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
2995 if (!gimple_seq_empty_p (stmts))
2997 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
2998 gcc_assert (! new_bb);
2999 insertions = true;
3001 if (!builtexpr)
3003 /* We cannot insert a PHI node if we failed to insert
3004 on one edge. */
3005 nophi = true;
3006 continue;
3008 if (is_gimple_min_invariant (builtexpr))
3009 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3010 else
3011 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3013 /* If we didn't want a phi node, and we made insertions, we still have
3014 inserted new stuff, and thus return true. If we didn't want a phi node,
3015 and didn't make insertions, we haven't added anything new, so return
3016 false. */
3017 if (nophi && insertions)
3018 return true;
3019 else if (nophi && !insertions)
3020 return false;
3022 /* Now build a phi for the new variable. */
3023 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3024 phi = create_phi_node (temp, block);
3026 VN_INFO_GET (temp)->value_id = val;
3027 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3028 if (VN_INFO (temp)->valnum == NULL_TREE)
3029 VN_INFO (temp)->valnum = temp;
3030 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3031 FOR_EACH_EDGE (pred, ei, block->preds)
3033 pre_expr ae = avail[pred->dest_idx];
3034 gcc_assert (get_expr_type (ae) == type
3035 || useless_type_conversion_p (type, get_expr_type (ae)));
3036 if (ae->kind == CONSTANT)
3037 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3038 pred, UNKNOWN_LOCATION);
3039 else
3040 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3043 newphi = get_or_alloc_expr_for_name (temp);
3044 add_to_value (val, newphi);
3046 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3047 this insertion, since we test for the existence of this value in PHI_GEN
3048 before proceeding with the partial redundancy checks in insert_aux.
3050 The value may exist in AVAIL_OUT, in particular, it could be represented
3051 by the expression we are trying to eliminate, in which case we want the
3052 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3053 inserted there.
3055 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3056 this block, because if it did, it would have existed in our dominator's
3057 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3060 bitmap_insert_into_set (PHI_GEN (block), newphi);
3061 bitmap_value_replace_in_set (AVAIL_OUT (block),
3062 newphi);
3063 bitmap_insert_into_set (NEW_SETS (block),
3064 newphi);
3066 /* If we insert a PHI node for a conversion of another PHI node
3067 in the same basic-block try to preserve range information.
3068 This is important so that follow-up loop passes receive optimal
3069 number-of-iterations analysis results. See PR61743. */
3070 if (expr->kind == NARY
3071 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3072 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3073 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3074 && INTEGRAL_TYPE_P (type)
3075 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3076 && (TYPE_PRECISION (type)
3077 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3078 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3080 wide_int min, max;
3081 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3082 && !wi::neg_p (min, SIGNED)
3083 && !wi::neg_p (max, SIGNED))
3084 /* Just handle extension and sign-changes of all-positive ranges. */
3085 set_range_info (temp,
3086 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3087 wide_int_storage::from (min, TYPE_PRECISION (type),
3088 TYPE_SIGN (type)),
3089 wide_int_storage::from (max, TYPE_PRECISION (type),
3090 TYPE_SIGN (type)));
3093 if (dump_file && (dump_flags & TDF_DETAILS))
3095 fprintf (dump_file, "Created phi ");
3096 print_gimple_stmt (dump_file, phi, 0);
3097 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3099 pre_stats.phis++;
3100 return true;
3105 /* Perform insertion of partially redundant or hoistable values.
3106 For BLOCK, do the following:
3107 1. Propagate the NEW_SETS of the dominator into the current block.
3108 If the block has multiple predecessors,
3109 2a. Iterate over the ANTIC expressions for the block to see if
3110 any of them are partially redundant.
3111 2b. If so, insert them into the necessary predecessors to make
3112 the expression fully redundant.
3113 2c. Insert a new PHI merging the values of the predecessors.
3114 2d. Insert the new PHI, and the new expressions, into the
3115 NEW_SETS set.
3116 If the block has multiple successors,
3117 3a. Iterate over the ANTIC values for the block to see if
3118 any of them are good candidates for hoisting.
3119 3b. If so, insert expressions computing the values in BLOCK,
3120 and add the new expressions into the NEW_SETS set.
3121 4. Recursively call ourselves on the dominator children of BLOCK.
3123 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3124 do_pre_regular_insertion and do_partial_insertion. 3a and 3b are
3125 done in do_hoist_insertion. */
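
/* Editorial note on step 1: if the walk inserted pretmp_1 for value V
   while processing DOM, then NEW_SETS (DOM) contains pretmp_1 and the
   propagation loop replaces V's representative in NEW_SETS and
   AVAIL_OUT of each dominated BLOCK, so the full-redundancy check
   against AVAIL_OUT (dom) sees the freshly inserted leader.  */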
3128 static bool
3129 do_pre_regular_insertion (basic_block block, basic_block dom)
3131 bool new_stuff = false;
3132 vec<pre_expr> exprs;
3133 pre_expr expr;
3134 auto_vec<pre_expr> avail;
3135 int i;
3137 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3138 avail.safe_grow (EDGE_COUNT (block->preds));
3140 FOR_EACH_VEC_ELT (exprs, i, expr)
3142 if (expr->kind == NARY
3143 || expr->kind == REFERENCE)
3145 unsigned int val;
3146 bool by_some = false;
3147 bool cant_insert = false;
3148 bool all_same = true;
3149 pre_expr first_s = NULL;
3150 edge pred;
3151 basic_block bprime;
3152 pre_expr eprime = NULL;
3153 edge_iterator ei;
3154 pre_expr edoubleprime = NULL;
3155 bool do_insertion = false;
3157 val = get_expr_value_id (expr);
3158 if (bitmap_set_contains_value (PHI_GEN (block), val))
3159 continue;
3160 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3162 if (dump_file && (dump_flags & TDF_DETAILS))
3164 fprintf (dump_file, "Found fully redundant value: ");
3165 print_pre_expr (dump_file, expr);
3166 fprintf (dump_file, "\n");
3168 continue;
3171 FOR_EACH_EDGE (pred, ei, block->preds)
3173 unsigned int vprime;
3175 /* We should never run insertion for the exit block
3176 and so should never come across fake pred edges. */
3177 gcc_assert (!(pred->flags & EDGE_FAKE));
3178 bprime = pred->src;
3179 /* We are looking at ANTIC_OUT of bprime. */
3180 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3181 bprime, block);
3183 /* eprime will generally only be NULL if the
3184 value of the expression, translated
3185 through the PHI for this predecessor, is
3186 undefined. If that is the case, we can't
3187 make the expression fully redundant,
3188 because its value is undefined along a
3189 predecessor path. We can thus break out
3190 early because it doesn't matter what the
3191 rest of the results are. */
3192 if (eprime == NULL)
3194 avail[pred->dest_idx] = NULL;
3195 cant_insert = true;
3196 break;
3199 vprime = get_expr_value_id (eprime);
3200 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3201 vprime);
3202 if (edoubleprime == NULL)
3204 avail[pred->dest_idx] = eprime;
3205 all_same = false;
3207 else
3209 avail[pred->dest_idx] = edoubleprime;
3210 by_some = true;
3211 /* We want to perform insertions to remove a redundancy on
3212 a path in the CFG we want to optimize for speed. */
3213 if (optimize_edge_for_speed_p (pred))
3214 do_insertion = true;
3215 if (first_s == NULL)
3216 first_s = edoubleprime;
3217 else if (!pre_expr_d::equal (first_s, edoubleprime))
3218 all_same = false;
3221 /* If we can insert it, it is not the same value already
3222 existing along every predecessor, and it is defined by
3223 some predecessor, then the expression is partially
3224 redundant. */
3225 if (!cant_insert && !all_same && by_some)
3227 if (!do_insertion)
3229 if (dump_file && (dump_flags & TDF_DETAILS))
3231 fprintf (dump_file, "Skipping partial redundancy for "
3232 "expression ");
3233 print_pre_expr (dump_file, expr);
3234 fprintf (dump_file, " (%04d), no redundancy on to be "
3235 "optimized for speed edge\n", val);
3238 else if (dbg_cnt (treepre_insert))
3240 if (dump_file && (dump_flags & TDF_DETAILS))
3242 fprintf (dump_file, "Found partial redundancy for "
3243 "expression ");
3244 print_pre_expr (dump_file, expr);
3245 fprintf (dump_file, " (%04d)\n",
3246 get_expr_value_id (expr));
3248 if (insert_into_preds_of_block (block,
3249 get_expression_id (expr),
3250 avail))
3251 new_stuff = true;
3254 /* If all edges produce the same value and that value is
3255 an invariant, then the PHI has the same value on all
3256 edges. Note this. */
3257 else if (!cant_insert && all_same)
3259 gcc_assert (edoubleprime->kind == CONSTANT
3260 || edoubleprime->kind == NAME);
3262 tree temp = make_temp_ssa_name (get_expr_type (expr),
3263 NULL, "pretmp");
3264 gassign *assign
3265 = gimple_build_assign (temp,
3266 edoubleprime->kind == CONSTANT ?
3267 PRE_EXPR_CONSTANT (edoubleprime) :
3268 PRE_EXPR_NAME (edoubleprime));
3269 gimple_stmt_iterator gsi = gsi_after_labels (block);
3270 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3272 VN_INFO_GET (temp)->value_id = val;
3273 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3274 if (VN_INFO (temp)->valnum == NULL_TREE)
3275 VN_INFO (temp)->valnum = temp;
3276 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3277 pre_expr newe = get_or_alloc_expr_for_name (temp);
3278 add_to_value (val, newe);
3279 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3280 bitmap_insert_into_set (NEW_SETS (block), newe);
3285 exprs.release ();
3286 return new_stuff;
3290 /* Perform insertion for partially anticipatable expressions. There
3291 is only one case in which we will perform insertion for these: when
3292 the expression is partially anticipatable and fully available.
3293 In this case, we know that putting it earlier will enable us to
3294 remove the later computation. */
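
/* An editorial example:

     if (c_1)
       x_1 = a_2 + b_3;
     else
       y_4 = a_2 + b_3;
     <join>
     if (d_5)
       z_6 = a_2 + b_3;

   At the join a_2 + b_3 is only partially anticipatable (it is used
   only when d_5 holds) but fully available from both predecessors, so
   a PHI <x_1, y_4> merging the existing computations is inserted
   there, making the computation of z_6 fully redundant.  */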
3296 static bool
3297 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3299 bool new_stuff = false;
3300 vec<pre_expr> exprs;
3301 pre_expr expr;
3302 auto_vec<pre_expr> avail;
3303 int i;
3305 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3306 avail.safe_grow (EDGE_COUNT (block->preds));
3308 FOR_EACH_VEC_ELT (exprs, i, expr)
3310 if (expr->kind == NARY
3311 || expr->kind == REFERENCE)
3313 unsigned int val;
3314 bool by_all = true;
3315 bool cant_insert = false;
3316 edge pred;
3317 basic_block bprime;
3318 pre_expr eprime = NULL;
3319 edge_iterator ei;
3321 val = get_expr_value_id (expr);
3322 if (bitmap_set_contains_value (PHI_GEN (block), val))
3323 continue;
3324 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3325 continue;
3327 FOR_EACH_EDGE (pred, ei, block->preds)
3329 unsigned int vprime;
3330 pre_expr edoubleprime;
3332 /* We should never run insertion for the exit block
3333 and so should never come across fake pred edges. */
3334 gcc_assert (!(pred->flags & EDGE_FAKE));
3335 bprime = pred->src;
3336 eprime = phi_translate (expr, ANTIC_IN (block),
3337 PA_IN (block),
3338 bprime, block);
3340 /* eprime will generally only be NULL if the
3341 value of the expression, translated
3342 through the PHI for this predecessor, is
3343 undefined. If that is the case, we can't
3344 make the expression fully redundant,
3345 because its value is undefined along a
3346 predecessor path. We can thus break out
3347 early because it doesn't matter what the
3348 rest of the results are. */
3349 if (eprime == NULL)
3351 avail[pred->dest_idx] = NULL;
3352 cant_insert = true;
3353 break;
3356 vprime = get_expr_value_id (eprime);
3357 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3358 avail[pred->dest_idx] = edoubleprime;
3359 if (edoubleprime == NULL)
3361 by_all = false;
3362 break;
3366 /* If we can insert it and its value is available along every
3367 predecessor, the expression is partially anticipatable but
3368 fully available, so inserting it here lets the later
3369 computations be removed. */
3370 if (!cant_insert && by_all)
3372 edge succ;
3373 bool do_insertion = false;
3375 /* Insert only if we can remove a later expression on a path
3376 that we want to optimize for speed.
3377 The phi node that we will be inserting in BLOCK is not free,
3378 and inserting it for the sake of a !optimize_for_speed successor
3379 may cause regressions on the speed path. */
3380 FOR_EACH_EDGE (succ, ei, block->succs)
3382 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3383 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3385 if (optimize_edge_for_speed_p (succ))
3386 do_insertion = true;
3390 if (!do_insertion)
3392 if (dump_file && (dump_flags & TDF_DETAILS))
3394 fprintf (dump_file, "Skipping partial partial redundancy "
3395 "for expression ");
3396 print_pre_expr (dump_file, expr);
3397 fprintf (dump_file, " (%04d), not (partially) anticipated "
3398 "on any to be optimized for speed edges\n", val);
3401 else if (dbg_cnt (treepre_insert))
3403 pre_stats.pa_insert++;
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, "Found partial partial redundancy "
3407 "for expression ");
3408 print_pre_expr (dump_file, expr);
3409 fprintf (dump_file, " (%04d)\n",
3410 get_expr_value_id (expr));
3412 if (insert_into_preds_of_block (block,
3413 get_expression_id (expr),
3414 avail))
3415 new_stuff = true;
3421 exprs.release ();
3422 return new_stuff;
3425 /* Insert expressions in BLOCK to compute hoistable values up.
3426 Return TRUE if something was inserted, otherwise return FALSE.
3427 The caller has to make sure that BLOCK has at least two successors. */
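
/* An editorial example of the transformation.  Given

     if (cond_1)
       x_1 = a_2 + b_3;
     else
       y_4 = a_2 + b_3;

   a_2 + b_3 is in ANTIC_IN of the block ending in the condition, not
   in its AVAIL_OUT, and available out of both successors, so it is
   computed once before the branch:

     pretmp_5 = a_2 + b_3;
     if (cond_1)
       ...

   and the two later computations become fully redundant.  */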
3429 static bool
3430 do_hoist_insertion (basic_block block)
3432 edge e;
3433 edge_iterator ei;
3434 bool new_stuff = false;
3435 unsigned i;
3436 gimple_stmt_iterator last;
3438 /* At least two successors, or else... */
3439 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3441 /* Check that all successors of BLOCK are dominated by block.
3442 We could use dominated_by_p() for this, but actually there is a much
3443 quicker check: any successor that is dominated by BLOCK can't have
3444 more than one predecessor edge. */
3445 FOR_EACH_EDGE (e, ei, block->succs)
3446 if (! single_pred_p (e->dest))
3447 return false;
3449 /* Determine the insertion point. If we'd have to insert before
3450 the last stmt and cannot do so safely, bail out. */
3451 last = gsi_last_bb (block);
3452 if (!gsi_end_p (last)
3453 && !is_ctrl_stmt (gsi_stmt (last))
3454 && stmt_ends_bb_p (gsi_stmt (last)))
3455 return false;
3457 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3458 hoistable values. */
3459 bitmap_set hoistable_set;
3461 /* A hoistable value must be in ANTIC_IN(block)
3462 but not in AVAIL_OUT(BLOCK). */
3463 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3464 bitmap_and_compl (&hoistable_set.values,
3465 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3467 /* Short-cut for a common case: hoistable_set is empty. */
3468 if (bitmap_empty_p (&hoistable_set.values))
3469 return false;
3471 /* Compute which of the hoistable values is in AVAIL_OUT of
3472 at least one of the successors of BLOCK. */
3473 bitmap_head availout_in_some;
3474 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3475 FOR_EACH_EDGE (e, ei, block->succs)
3476 /* Do not consider expressions solely because of their availability
3477 on loop exits. They'd be ANTIC-IN throughout the whole loop
3478 and thus effectively hoisted across loops by combination of
3479 PRE and hoisting. */
3480 if (! loop_exit_edge_p (block->loop_father, e))
3481 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3482 &AVAIL_OUT (e->dest)->values);
3483 bitmap_clear (&hoistable_set.values);
3485 /* Short-cut for a common case: availout_in_some is empty. */
3486 if (bitmap_empty_p (&availout_in_some))
3487 return false;
3489 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3490 hoistable_set.values = availout_in_some;
3491 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3493 /* Now finally construct the topologically ordered expression set. */
3494 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3496 bitmap_clear (&hoistable_set.values);
3498 /* If there are candidate values for hoisting, insert expressions
3499 strategically to make the hoistable expressions fully redundant. */
3500 pre_expr expr;
3501 FOR_EACH_VEC_ELT (exprs, i, expr)
3503 /* While we try to sort expressions topologically above, the
3504 sorting doesn't work out perfectly. Catch expressions we have
3505 already inserted. */
3506 unsigned int value_id = get_expr_value_id (expr);
3507 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3509 if (dump_file && (dump_flags & TDF_DETAILS))
3511 fprintf (dump_file,
3512 "Already inserted expression for ");
3513 print_pre_expr (dump_file, expr);
3514 fprintf (dump_file, " (%04d)\n", value_id);
3516 continue;
3519 /* OK, we should hoist this value. Perform the transformation. */
3520 pre_stats.hoist_insert++;
3521 if (dump_file && (dump_flags & TDF_DETAILS))
3523 fprintf (dump_file,
3524 "Inserting expression in block %d for code hoisting: ",
3525 block->index);
3526 print_pre_expr (dump_file, expr);
3527 fprintf (dump_file, " (%04d)\n", value_id);
3530 gimple_seq stmts = NULL;
3531 tree res = create_expression_by_pieces (block, expr, &stmts,
3532 get_expr_type (expr));
3534 /* Do not return true if expression creation ultimately
3535 did not insert any statements. */
3536 if (gimple_seq_empty_p (stmts))
3537 res = NULL_TREE;
3538 else
3540 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3541 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3542 else
3543 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3546 /* Make sure to not return true if expression creation ultimately
3547 failed but also make sure to insert any stmts produced as they
3548 are tracked in inserted_exprs. */
3549 if (! res)
3550 continue;
3552 new_stuff = true;
3555 exprs.release ();
3557 return new_stuff;
3560 /* Do a dominator walk on the control flow graph, and insert computations
3561 of values as necessary for PRE and hoisting. */
3563 static bool
3564 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3566 basic_block son;
3567 bool new_stuff = false;
3569 if (block)
3571 basic_block dom;
3572 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3573 if (dom)
3575 unsigned i;
3576 bitmap_iterator bi;
3577 bitmap_set_t newset;
3579 /* First, update the AVAIL_OUT set with anything we may have
3580 inserted higher up in the dominator tree. */
3581 newset = NEW_SETS (dom);
3582 if (newset)
3584 /* Note that we need to value_replace both NEW_SETS and
3585 AVAIL_OUT. In both cases the value may already be
3586 represented by some non-simple expression here that we
3587 want to replace with the new one. */
3588 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3590 pre_expr expr = expression_for_id (i);
3591 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3592 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3596 /* Insert expressions for partial redundancies. */
3597 if (do_pre && !single_pred_p (block))
3599 new_stuff |= do_pre_regular_insertion (block, dom);
3600 if (do_partial_partial)
3601 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3604 /* Insert expressions for hoisting. */
3605 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3606 new_stuff |= do_hoist_insertion (block);
3609 for (son = first_dom_son (CDI_DOMINATORS, block);
3610 son;
3611 son = next_dom_son (CDI_DOMINATORS, son))
3613 new_stuff |= insert_aux (son, do_pre, do_hoist);
3616 return new_stuff;
3619 /* Perform insertion of partially redundant and hoistable values. */
3621 static void
3622 insert (void)
3624 bool new_stuff = true;
3625 basic_block bb;
3626 int num_iterations = 0;
3628 FOR_ALL_BB_FN (bb, cfun)
3629 NEW_SETS (bb) = bitmap_set_new ();
3631 while (new_stuff)
3633 num_iterations++;
3634 if (dump_file && dump_flags & TDF_DETAILS)
3635 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3636 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3637 flag_code_hoisting);
3639 /* Clear the NEW sets before the next iteration. We have already
3640 fully propagated their contents. */
3641 if (new_stuff)
3642 FOR_ALL_BB_FN (bb, cfun)
3643 bitmap_set_free (NEW_SETS (bb));
3645 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3649 /* Compute the AVAIL set for all basic blocks.
3651 This function performs value numbering of the statements in each basic
3652 block. The AVAIL sets are built from information we glean while doing
3653 this value numbering, since the AVAIL sets contain only one entry per
3654 value.
3656 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3657 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
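
/* An editorial sketch: because AVAIL_IN[BLOCK] = AVAIL_OUT[dom (BLOCK)],
   the dominator walk below just copies the dominator's AVAIL_OUT and
   then adds local definitions as it sees them, e.g.

     BLOCK:  x_1 = PHI <...>;   -> PHI_GEN (block), AVAIL_OUT (block)
	     y_2 = x_1 + 1;     -> TMP_GEN (block), AVAIL_OUT (block),
				   and x_1 + 1 goes to EXP_GEN (block)

   keeping a single leader per value in AVAIL_OUT.  */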
3659 static void
3660 compute_avail (void)
3663 basic_block block, son;
3664 basic_block *worklist;
3665 size_t sp = 0;
3666 unsigned i;
3667 tree name;
3669 /* We pretend that default definitions are defined in the entry block.
3670 This includes function arguments and the static chain decl. */
3671 FOR_EACH_SSA_NAME (i, name, cfun)
3673 pre_expr e;
3674 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3675 || has_zero_uses (name)
3676 || virtual_operand_p (name))
3677 continue;
3679 e = get_or_alloc_expr_for_name (name);
3680 add_to_value (get_expr_value_id (e), e);
3681 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3682 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3686 if (dump_file && (dump_flags & TDF_DETAILS))
3688 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3689 "tmp_gen", ENTRY_BLOCK);
3690 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3691 "avail_out", ENTRY_BLOCK);
3694 /* Allocate the worklist. */
3695 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3697 /* Seed the algorithm by putting the dominator children of the entry
3698 block on the worklist. */
3699 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3700 son;
3701 son = next_dom_son (CDI_DOMINATORS, son))
3702 worklist[sp++] = son;
3704 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3705 = ssa_default_def (cfun, gimple_vop (cfun));
3707 /* Loop until the worklist is empty. */
3708 while (sp)
3710 gimple *stmt;
3711 basic_block dom;
3713 /* Pick a block from the worklist. */
3714 block = worklist[--sp];
3716 /* Initially, the set of available values in BLOCK is that of
3717 its immediate dominator. */
3718 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3719 if (dom)
3721 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3722 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3725 /* Generate values for PHI nodes. */
3726 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3727 gsi_next (&gsi))
3729 tree result = gimple_phi_result (gsi.phi ());
3731 /* We have no need for virtual phis, as they don't represent
3732 actual computations. */
3733 if (virtual_operand_p (result))
3735 BB_LIVE_VOP_ON_EXIT (block) = result;
3736 continue;
3739 pre_expr e = get_or_alloc_expr_for_name (result);
3740 add_to_value (get_expr_value_id (e), e);
3741 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3742 bitmap_insert_into_set (PHI_GEN (block), e);
3745 BB_MAY_NOTRETURN (block) = 0;
3747 /* Now compute value numbers and populate value sets with all
3748 the expressions computed in BLOCK. */
3749 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3750 gsi_next (&gsi))
3752 ssa_op_iter iter;
3753 tree op;
3755 stmt = gsi_stmt (gsi);
3757 /* Cache whether the basic-block has any non-visible side-effect
3758 or control flow.
3759 If this isn't a call or it is the last stmt in the
3760 basic-block then the CFG represents things correctly. */
3761 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3763 /* Non-looping const functions always return normally.
3764 Otherwise the call might not return or might have side-effects
3765 that forbid hoisting possibly trapping expressions
3766 before it. */
3767 int flags = gimple_call_flags (stmt);
3768 if (!(flags & ECF_CONST)
3769 || (flags & ECF_LOOPING_CONST_OR_PURE))
3770 BB_MAY_NOTRETURN (block) = 1;
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (op);

	      add_to_value (get_expr_value_id (e), e);
	      bitmap_insert_into_set (TMP_GEN (block), e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	    }

	  if (gimple_vdef (stmt))
	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
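	  /* Statements with side effects or that may throw still produce
	     values (TMP_GEN and AVAIL_OUT above) but contribute no
	     expressions to EXP_GEN, as their computations cannot be
	     re-materialized elsewhere; debug stmts must never influence
	     code generation.  */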
	  if (gimple_has_side_effects (stmt)
	      || stmt_could_throw_p (stmt)
	      || is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      if (ssa_undefined_value_p (op))
		continue;
	      pre_expr e = get_or_alloc_expr_for_name (op);
	      bitmap_value_insert_into_set (EXP_GEN (block), e);
	    }
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      continue;

	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		vn_reference_s ref1;
		pre_expr result = NULL;

		/* We can value number only calls to real functions.  */
		if (gimple_call_internal_p (stmt))
		  continue;

		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
		if (!ref)
		  continue;
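		/* The VUSE test below is a cheap local check: if the call's
		   VUSE is absent, defined by a PHI, or defined outside
		   BLOCK, then no store in BLOCK executes before the call,
		   so the call's value is valid from block entry up to the
		   point it is computed.  */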
		/* If the value of the call is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
		if (!gimple_vuse (stmt)
		    || (gimple_code (SSA_NAME_DEF_STMT (gimple_vuse (stmt)))
			== GIMPLE_PHI)
		    || gimple_bb (SSA_NAME_DEF_STMT (gimple_vuse (stmt)))
		       != block)
		  {
		    result = pre_expr_pool.allocate ();
		    result->kind = REFERENCE;
		    result->id = 0;
		    PRE_EXPR_REFERENCE (result) = ref;

		    get_or_alloc_expression_id (result);
		    add_to_value (get_expr_value_id (result), result);
		    bitmap_value_insert_into_set (EXP_GEN (block), result);
		  }
		continue;
	      }
	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (vn_get_stmt_kind (stmt))
		  {
		  case VN_NARY:
		    {
		      enum tree_code code = gimple_assign_rhs_code (stmt);
		      vn_nary_op_t nary;

		      /* COND_EXPR and VEC_COND_EXPR are awkward in
			 that they contain an embedded complex expression.
			 Don't even try to shove those through PRE.  */
		      if (code == COND_EXPR
			  || code == VEC_COND_EXPR)
			continue;

		      vn_nary_op_lookup_stmt (stmt, &nary);
		      if (!nary)
			continue;

		      /* If the NARY traps and there was a preceding
			 point in the block that might not return, avoid
			 adding the nary to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_nary_may_trap (nary))
			continue;

		      result = pre_expr_pool.allocate ();
		      result->kind = NARY;
		      result->id = 0;
		      PRE_EXPR_NARY (result) = nary;
		      break;
		    }
		  case VN_REFERENCE:
		    {
		      tree rhs1 = gimple_assign_rhs1 (stmt);
		      alias_set_type set = get_alias_set (rhs1);
		      vec<vn_reference_op_s> operands
			= vn_reference_operands_for_lookup (rhs1);
		      vn_reference_t ref;
		      vn_reference_lookup_pieces (gimple_vuse (stmt), set,
						  TREE_TYPE (rhs1),
						  operands, &ref, VN_WALK);
		      if (!ref)
			{
			  operands.release ();
			  continue;
			}

		      /* If the value of the reference is not invalidated in
			 this block until it is computed, add the expression
			 to EXP_GEN.  */
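		      /* Illustrative example: a store "*p_6 = ..." earlier
			 in BLOCK whose VDEF reaches this load and that may
			 clobber RHS1 means the loaded value is not live
			 from block entry, so it must stay out of EXP_GEN.  */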
		      if (gimple_vuse (stmt))
			{
			  gimple *def_stmt;
			  bool ok = true;
			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
			  while (!gimple_nop_p (def_stmt)
				 && gimple_code (def_stmt) != GIMPLE_PHI
				 && gimple_bb (def_stmt) == block)
			    {
			      if (stmt_may_clobber_ref_p
				    (def_stmt, gimple_assign_rhs1 (stmt)))
				{
				  ok = false;
				  break;
				}
			      def_stmt
				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
			    }
			  if (!ok)
			    {
			      operands.release ();
			      continue;
			    }
			}
		      /* If the load was value-numbered to another
			 load make sure we do not use its expression
			 for insertion if it wouldn't be a valid
			 replacement.  */
		      /* At the moment we have a testcase
			 for hoist insertion of aligned vs. misaligned
			 variants in gcc.dg/torture/pr65270-1.c thus
			 with just alignment to be considered we can
			 simply replace the expression in the hashtable
			 with the most conservative one.  */
		      vn_reference_op_t ref1 = &ref->operands.last ();
		      while (ref1->opcode != TARGET_MEM_REF
			     && ref1->opcode != MEM_REF
			     && ref1 != &ref->operands[0])
			--ref1;
		      vn_reference_op_t ref2 = &operands.last ();
		      while (ref2->opcode != TARGET_MEM_REF
			     && ref2->opcode != MEM_REF
			     && ref2 != &operands[0])
			--ref2;
		      if ((ref1->opcode == TARGET_MEM_REF
			   || ref1->opcode == MEM_REF)
			  && (TYPE_ALIGN (ref1->type)
			      > TYPE_ALIGN (ref2->type)))
			ref1->type
			  = build_aligned_type (ref1->type,
						TYPE_ALIGN (ref2->type));
		      /* TBAA metadata is likewise part of the value, so
			 make sure the hashtable expression conservatively
			 covers this use as well, by adjusting its alias
			 set and its base.  */
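		      /* Three cases: if the hashtable entry's alias set is
			 a superset of this access's set it is already
			 conservative enough; if this access's set is the
			 superset, widen the entry's set to it; if neither
			 contains the other, fall back to alias set zero,
			 which aliases everything.  The MEM_REF offset
			 operand (or the TARGET_MEM_REF op2) is retyped to
			 match, since its pointer type carries the TBAA
			 base.  */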
		      if (ref->set == set
			  || alias_set_subset_of (set, ref->set))
			;
		      else if (alias_set_subset_of (ref->set, set))
			{
			  ref->set = set;
			  if (ref1->opcode == MEM_REF)
			    ref1->op0
			      = wide_int_to_tree (TREE_TYPE (ref2->op0),
						  wi::to_wide (ref1->op0));
			  else
			    ref1->op2
			      = wide_int_to_tree (TREE_TYPE (ref2->op2),
						  wi::to_wide (ref1->op2));
			}
		      else
			{
			  ref->set = 0;
			  if (ref1->opcode == MEM_REF)
			    ref1->op0
			      = wide_int_to_tree (ptr_type_node,
						  wi::to_wide (ref1->op0));
			  else
			    ref1->op2
			      = wide_int_to_tree (ptr_type_node,
						  wi::to_wide (ref1->op2));
			}
		      operands.release ();

		      result = pre_expr_pool.allocate ();
		      result->kind = REFERENCE;
		      result->id = 0;
		      PRE_EXPR_REFERENCE (result) = ref;
		      break;
		    }

		  default:
		    continue;
		  }

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		bitmap_value_insert_into_set (EXP_GEN (block), result);
		continue;
	      }

	    default:
	      break;
	    }
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_bitmap_set (dump_file, EXP_GEN (block),
			    "exp_gen", block->index);
	  print_bitmap_set (dump_file, PHI_GEN (block),
			    "phi_gen", block->index);
	  print_bitmap_set (dump_file, TMP_GEN (block),
			    "tmp_gen", block->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (block),
			    "avail_out", block->index);
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
/* Cheap DCE of a known set of possibly dead stmts.

   Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
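/* An assumed example, for illustration only: if PRE inserted
   pretmp_7 = a_1 + b_2 to make a partially redundant add fully redundant,
   but elimination then rewrote every intended use via a different leader,
   pretmp_7 ends up with zero uses and is deleted here, together with any
   inserted stmts only it used.  */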
static void
remove_dead_inserted_code (void)
{
  /* ???  Re-use inserted_exprs as worklist not only as initial set.
     This may end up removing non-inserted code as well.  If we
     keep inserted_exprs unchanged we could restrict new worklist
     elements to members of inserted_exprs.  */
  bitmap worklist = inserted_exprs;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
	continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
	continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  if (TREE_CODE (use) == SSA_NAME
	      && ! SSA_NAME_IS_DEFAULT_DEF (use))
	    bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	}

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing unnecessary insertion:");
	  print_gimple_stmt (dump_file, t, 0);
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  gsi_remove (&gsi, true);
	  release_defs (t);
	}
    }
}
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  value_expressions.release ();
  expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
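  /* Code hoisting piggy-backs on the ANTIC sets computed for PRE, so
     the pass must run when either transform is enabled.  */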
  virtual bool gate (function *)
  { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
  virtual unsigned int execute (function *);
}; // class pass_pre
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_critical_edges ();
  scev_initialize ();

  run_scc_vn (VN_WALK);

  init_pre ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast, nor do
     we require AVAIL.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_avail ();
      compute_antic ();
      insert ();
    }
  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not)
     end up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));
  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (inserted_exprs);

  remove_dead_inserted_code ();

  fini_pre ();

  scev_finalize ();
  loop_optimizer_finalize ();

  /* Restore SSA info before tail-merging as that resets it as well.  */
  scc_vn_restore_ssa_info ();
  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web; together with the
     cfg-cleanup opportunities exposed by PRE, this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}
} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}