1 /* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "alloc-pool.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "fold-const.h"
36 #include "cfganal.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-ssa.h"
45 #include "cfgloop.h"
46 #include "tree-ssa-sccvn.h"
47 #include "tree-scalar-evolution.h"
48 #include "params.h"
49 #include "dbgcnt.h"
50 #include "domwalk.h"
51 #include "tree-ssa-propagate.h"
52 #include "tree-ssa-dce.h"
53 #include "tree-cfgcleanup.h"
54 #include "alias.h"
56 /* Even though this file is called tree-ssa-pre.c, we actually
57 implement a bit more than just PRE here. All of them piggy-back
58 on GVN which is implemented in tree-ssa-sccvn.c.
60 1. Full Redundancy Elimination (FRE)
61 This is the elimination phase of GVN.
63 2. Partial Redundancy Elimination (PRE)
64 This adds computation of AVAIL_OUT and ANTIC_IN and
65 performs expression insertion to form GVN-PRE.
67 3. Code hoisting
68 This optimization uses the ANTIC_IN sets computed for PRE
69 to move expressions further up than PRE would do, to make
70 multiple computations of the same value fully redundant.
71 This pass is explained below (after the explanation of the
72 basic algorithm for PRE).
75 /* TODO:
77 1. Avail sets can be shared by making an avail_find_leader that
78 walks up the dominator tree and looks in those avail sets.
79 This might affect code optimality; it's unclear right now.
80 Currently the AVAIL_OUT sets are the remaining source of
81 quadratic memory use in GVN-PRE.
82 2. Strength reduction can be performed by anticipating expressions
83 we can repair later on.
84 3. We can do back-substitution or smarter value numbering to catch
85 commutative expressions split up over multiple statements.
88 /* For ease of terminology, "expression node" below refers to
89 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
90 represent the actual statement containing the expressions we care about,
91 and we cache the value number by putting it in the expression. */
93 /* Basic algorithm for Partial Redundancy Elimination:
95 First we walk the statements to generate the AVAIL sets, the
96 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
97 generation of values/expressions by a given block. We use them
98 when computing the ANTIC sets. The AVAIL sets consist of
99 SSA_NAME's that represent values, so we know what values are
100 available in what blocks. AVAIL is a forward dataflow problem. In
101 SSA, values are never killed, so we don't need a kill set, or a
102 fixpoint iteration, in order to calculate the AVAIL sets. In
103 traditional parlance, AVAIL sets tell us the availability (up-safety)
104 of the expressions/values.
106 Next, we generate the ANTIC sets. These sets represent the
107 anticipatable expressions. ANTIC is a backwards dataflow
108 problem. An expression is anticipatable in a given block if it could
109 be generated in that block. This means that if we had to perform
110 an insertion in that block, of the value of that expression, we
111 could. Calculating the ANTIC sets requires phi translation of
112 expressions, because the flow goes backwards through phis. We must
113 iterate to a fixpoint of the ANTIC sets, because we have a kill
114 set. Even in SSA form, values are not live over the entire
115 function, only from their definition point onwards. So we have to
116 remove values from the ANTIC set once we go past the definition
117 point of the leaders that make them up.
118 compute_antic/compute_antic_aux performs this computation.
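   As an illustrative sketch (with made-up SSA names): if a_1 is defined
   by a statement in block B and a_1 + 1 is computed after it, then
   subtracting TMP_GEN (B) removes a_1 from the set and clean () drops
   a_1 + 1 because its operand no longer has a leader, so the value does
   not survive into ANTIC_IN (B) or any block above the definition point.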
120 Third, we perform insertions to make partially redundant
121 expressions fully redundant.
123 An expression is partially redundant (excluding partial
124 anticipation) if:
126 1. It is AVAIL in some, but not all, of the predecessors of a
127 given block.
128 2. It is ANTIC in all the predecessors.
130 In order to make it fully redundant, we insert the expression into
131 the predecessors where it is not available, but is ANTIC.
133 When optimizing for size, we only eliminate the partial redundancy
134 if we need to insert in only one predecessor. This almost
135 completely avoids the code size increase that PRE usually causes.
137 For the partial anticipation case, we only perform insertion if it
138 is partially anticipated in some block, and fully available in all
139 of the predecessors.
141 do_pre_regular_insertion/do_pre_partial_partial_insertion
142 performs these steps, driven by insert/insert_aux.
144 Fourth, we eliminate fully redundant expressions.
145 This is a simple statement walk that replaces redundant
146 calculations with the now available values. */
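/* An illustrative sketch of regular PRE insertion (with made-up SSA names):

       if (cond_1)
        /        \
   x_2 = a_5 + b_6;   <no computation>
        \        /
      y_3 = a_5 + b_6;

   At the join block, a_5 + b_6 is ANTIC in both predecessors but AVAIL
   out of only the left one, so it is partially redundant.  Insertion
   places t_7 = a_5 + b_6 in the right predecessor and creates a PHI for
   the value at the join, after which elimination replaces the
   computation of y_3 with the PHI result.  */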
148 /* Basic algorithm for Code Hoisting:
150 Code hoisting is: Moving value computations up in the control flow
151 graph to make multiple copies redundant. Typically this is a size
152 optimization, but there are cases where it is also helpful for speed.
154 A simple code hoisting algorithm is implemented that piggy-backs on
155 the PRE infrastructure. For code hoisting, we have to know ANTIC_OUT
156 which is effectively ANTIC_IN - AVAIL_OUT. The latter two have to be
157 computed for PRE, and we can use them to perform a limited version of
158 code hoisting, too.
160 For the purpose of this implementation, a value is hoistable to a basic
161 block B if the following properties are met:
163 1. The value is in ANTIC_IN(B) -- the value will be computed on all
164 paths from B to function exit and it can be computed in B;
166 2. The value is not in AVAIL_OUT(B) -- there would be no need to
167 compute the value again and make it available twice;
169 3. All successors of B are dominated by B -- makes sure that inserting
170 a computation of the value in B will make the remaining
171 computations fully redundant;
173 4. At least one successor has the value in AVAIL_OUT -- to avoid
174 hoisting values up too far;
176 5. There are at least two successors of B -- hoisting in straight
177 line code is pointless.
179 The third condition is not strictly necessary, but it would complicate
180 the hoisting pass a lot. In fact, I don't know of any code hoisting
181 algorithm that does not have this requirement. Fortunately, experiments
182 have shown that most candidate hoistable values are in regions that meet
183 this condition (e.g. diamond-shaped regions).
185 The fourth condition is necessary to avoid hoisting things up too far
186 away from the uses of the value. Nothing else limits the algorithm
187 from hoisting everything up as far as ANTIC_IN allows. Experiments
188 with SPEC and CSiBE have shown that hoisting up too far results in more
189 spilling, less benefit for code size, and worse benchmark scores.
190 Fortunately, in practice most of the interesting hoisting opportunities
191 are caught despite this limitation.
193 For hoistable values that meet all conditions, expressions are inserted
194 to make the calculation of the hoistable value fully redundant. We
195 perform code hoisting insertions after each round of PRE insertions,
196 because code hoisting never exposes new PRE opportunities, but PRE can
197 create new code hoisting opportunities.
199 The code hoisting algorithm is implemented in do_hoist_insert, driven
200 by insert/insert_aux. */
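/* An illustrative sketch of a hoistable value (with made-up SSA names):

       B:  if (cond_1)
          /          \
   x_2 = a_5 + b_6;   y_3 = a_5 + b_6;

   The value of a_5 + b_6 is in ANTIC_IN (B) but not in AVAIL_OUT (B),
   B dominates both of its successors, both successors have the value in
   their AVAIL_OUT, and B has two successors, so all five conditions hold.
   Hoisting inserts t_7 = a_5 + b_6 at the end of B, making both arm
   computations fully redundant.  */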
202 /* Representations of value numbers:
204 Value numbers are represented by a representative SSA_NAME. We
205 will create fake SSA_NAME's in situations where we need a
206 representative but do not have one (because it is a complex
207 expression). In order to facilitate storing the value numbers in
208 bitmaps, and keep the number of wasted SSA_NAME's down, we also
209 associate a value_id with each value number, and create full blown
210 ssa_name's only where we actually need them (IE in operands of
211 existing expressions).
213 Theoretically you could replace all the value_id's with
214 SSA_NAME_VERSION, but this would allocate a large number of
215 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
216 It would also require an additional indirection at each point we
217 use the value id. */
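/* For example (an illustrative sketch): if a_1 = b_2 + 1 and
   c_3 = b_2 + 1 value-number the same, they share a single value_id and
   either a_1 or c_3 can serve as the representative SSA_NAME for that
   value.  */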
219 /* Representation of expressions on value numbers:
221 Expressions consisting of value numbers are represented the same
222 way as our VN internally represents them, with an additional
223 "pre_expr" wrapping around them in order to facilitate storing all
224 of the expressions in the same sets. */
226 /* Representation of sets:
228 The dataflow sets do not need to be sorted in any particular order
229 for the majority of their lifetime, and are simply represented as two
230 bitmaps, one that keeps track of values present in the set, and one
231 that keeps track of expressions present in the set.
233 When we need them in topological order, we produce it on demand by
234 transforming the bitmap into an array and sorting it into topo
235 order. */
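/* For example (an illustrative sketch): a set holding the expressions
   with ids 7 and 12, whose values have ids 3 and 5, has bits 7 and 12 set
   in its expressions bitmap and bits 3 and 5 set in its values bitmap.  */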
237 /* Type of expression, used to know which member of the PRE_EXPR union
238 is valid. */
240 enum pre_expr_kind
242 NAME,
243 NARY,
244 REFERENCE,
245 CONSTANT
248 union pre_expr_union
250 tree name;
251 tree constant;
252 vn_nary_op_t nary;
253 vn_reference_t reference;
256 typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
258 enum pre_expr_kind kind;
259 unsigned int id;
260 pre_expr_union u;
262 /* hash_table support. */
263 static inline hashval_t hash (const pre_expr_d *);
264 static inline int equal (const pre_expr_d *, const pre_expr_d *);
265 } *pre_expr;
267 #define PRE_EXPR_NAME(e) (e)->u.name
268 #define PRE_EXPR_NARY(e) (e)->u.nary
269 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
270 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
272 /* Compare E1 and E2 for equality. */
274 inline int
275 pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
277 if (e1->kind != e2->kind)
278 return false;
280 switch (e1->kind)
282 case CONSTANT:
283 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
284 PRE_EXPR_CONSTANT (e2));
285 case NAME:
286 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
287 case NARY:
288 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
289 case REFERENCE:
290 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
291 PRE_EXPR_REFERENCE (e2));
292 default:
293 gcc_unreachable ();
297 /* Hash E. */
299 inline hashval_t
300 pre_expr_d::hash (const pre_expr_d *e)
302 switch (e->kind)
304 case CONSTANT:
305 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
306 case NAME:
307 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
308 case NARY:
309 return PRE_EXPR_NARY (e)->hashcode;
310 case REFERENCE:
311 return PRE_EXPR_REFERENCE (e)->hashcode;
312 default:
313 gcc_unreachable ();
317 /* Next global expression id number. */
318 static unsigned int next_expression_id;
320 /* Mapping from expression to id number we can use in bitmap sets. */
321 static vec<pre_expr> expressions;
322 static hash_table<pre_expr_d> *expression_to_id;
323 static vec<unsigned> name_to_id;
325 /* Allocate an expression id for EXPR. */
327 static inline unsigned int
328 alloc_expression_id (pre_expr expr)
330 struct pre_expr_d **slot;
331 /* Make sure we won't overflow. */
332 gcc_assert (next_expression_id + 1 > next_expression_id);
333 expr->id = next_expression_id++;
334 expressions.safe_push (expr);
335 if (expr->kind == NAME)
337 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
338 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
339 re-allocations by using vec::reserve upfront. */
340 unsigned old_len = name_to_id.length ();
341 name_to_id.reserve (num_ssa_names - old_len);
342 name_to_id.quick_grow_cleared (num_ssa_names);
343 gcc_assert (name_to_id[version] == 0);
344 name_to_id[version] = expr->id;
346 else
348 slot = expression_to_id->find_slot (expr, INSERT);
349 gcc_assert (!*slot);
350 *slot = expr;
352 return next_expression_id - 1;
355 /* Return the expression id for EXPR. */
357 static inline unsigned int
358 get_expression_id (const pre_expr expr)
360 return expr->id;
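/* Return the expression id of EXPR, or 0 if EXPR has not been assigned
   an id yet.  */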
363 static inline unsigned int
364 lookup_expression_id (const pre_expr expr)
366 struct pre_expr_d **slot;
368 if (expr->kind == NAME)
370 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
371 if (name_to_id.length () <= version)
372 return 0;
373 return name_to_id[version];
375 else
377 slot = expression_to_id->find_slot (expr, NO_INSERT);
378 if (!slot)
379 return 0;
380 return ((pre_expr)*slot)->id;
384 /* Return the existing expression id for EXPR, or create one if one
385 does not exist yet. */
387 static inline unsigned int
388 get_or_alloc_expression_id (pre_expr expr)
390 unsigned int id = lookup_expression_id (expr);
391 if (id == 0)
392 return alloc_expression_id (expr);
393 return expr->id = id;
396 /* Return the expression that has expression id ID. */
398 static inline pre_expr
399 expression_for_id (unsigned int id)
401 return expressions[id];
404 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
406 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
408 static pre_expr
409 get_or_alloc_expr_for_name (tree name)
411 struct pre_expr_d expr;
412 pre_expr result;
413 unsigned int result_id;
415 expr.kind = NAME;
416 expr.id = 0;
417 PRE_EXPR_NAME (&expr) = name;
418 result_id = lookup_expression_id (&expr);
419 if (result_id != 0)
420 return expression_for_id (result_id);
422 result = pre_expr_pool.allocate ();
423 result->kind = NAME;
424 PRE_EXPR_NAME (result) = name;
425 alloc_expression_id (result);
426 return result;
429 /* An unordered bitmap set. One bitmap tracks values, the other,
430 expressions. */
431 typedef struct bitmap_set
433 bitmap_head expressions;
434 bitmap_head values;
435 } *bitmap_set_t;
437 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
438 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
440 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
441 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
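/* A minimal usage sketch for these iterators (illustrative, not from the
   surrounding code):

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ...
       }  */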
443 /* Mapping from value id to expressions with that value_id. */
444 static vec<bitmap> value_expressions;
446 /* Sets that we need to keep track of. */
447 typedef struct bb_bitmap_sets
449 /* The EXP_GEN set, which represents expressions/values generated in
450 a basic block. */
451 bitmap_set_t exp_gen;
453 /* The PHI_GEN set, which represents PHI results generated in a
454 basic block. */
455 bitmap_set_t phi_gen;
457 /* The TMP_GEN set, which represents results/temporaries generated
458 in a basic block. IE the LHS of an expression. */
459 bitmap_set_t tmp_gen;
461 /* The AVAIL_OUT set, which represents which values are available in
462 a given basic block. */
463 bitmap_set_t avail_out;
465 /* The ANTIC_IN set, which represents which values are anticipatable
466 in a given basic block. */
467 bitmap_set_t antic_in;
469 /* The PA_IN set, which represents which values are
470 partially anticipatable in a given basic block. */
471 bitmap_set_t pa_in;
473 /* The NEW_SETS set, which is used during insertion to augment the
474 AVAIL_OUT set of blocks with the new insertions performed during
475 the current iteration. */
476 bitmap_set_t new_sets;
478 /* A cache for value_dies_in_block_x. */
479 bitmap expr_dies;
481 /* The live virtual operand on successor edges. */
482 tree vop_on_exit;
484 /* True if we have visited this block during ANTIC calculation. */
485 unsigned int visited : 1;
487 /* True if we have visited this block after all successors have been
488 visited this way. */
489 unsigned int visited_with_visited_succs : 1;
491 /* True when the block contains a call that might not return. */
492 unsigned int contains_may_not_return_call : 1;
493 } *bb_value_sets_t;
495 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
496 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
497 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
498 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
499 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
500 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
501 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
502 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
503 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
504 #define BB_VISITED_WITH_VISITED_SUCCS(BB) \
505 ((bb_value_sets_t) ((BB)->aux))->visited_with_visited_succs
506 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
507 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
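/* The accessors above assume BB->aux points at the bb_bitmap_sets
   instance this pass allocates for BB; e.g. ANTIC_IN (bb) yields the
   ANTIC_IN set of BB.  */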
510 /* This structure is used to keep track of statistics on what
511 optimization PRE was able to perform. */
512 static struct
514 /* The number of new expressions/temporaries generated by PRE. */
515 int insertions;
517 /* The number of inserts found due to partial anticipation */
518 int pa_insert;
520 /* The number of inserts made for code hoisting. */
521 int hoist_insert;
523 /* The number of new PHI nodes added by PRE. */
524 int phis;
525 } pre_stats;
527 static bool do_partial_partial;
528 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
529 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
530 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
531 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
532 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
533 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
534 static bitmap_set_t bitmap_set_new (void);
535 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
536 tree);
537 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
538 static unsigned int get_expr_value_id (pre_expr);
540 /* We can add and remove elements and entries to and from sets
541 and hash tables, so we use alloc pools for them. */
543 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
544 static bitmap_obstack grand_bitmap_obstack;
546 /* A three tuple {e, pred, v} used to cache phi translations in the
547 phi_translate_table. */
549 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
551 /* The expression. */
552 pre_expr e;
554 /* The predecessor block along which we translated the expression. */
555 basic_block pred;
557 /* The value that resulted from the translation. */
558 pre_expr v;
560 /* The hashcode for the expression, pred pair. This is cached for
561 speed reasons. */
562 hashval_t hashcode;
564 /* hash_table support. */
565 static inline hashval_t hash (const expr_pred_trans_d *);
566 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
567 } *expr_pred_trans_t;
568 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
570 inline hashval_t
571 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
573 return e->hashcode;
576 inline int
577 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
578 const expr_pred_trans_d *ve2)
580 basic_block b1 = ve1->pred;
581 basic_block b2 = ve2->pred;
583 /* If they are not translations for the same basic block, they can't
584 be equal. */
585 if (b1 != b2)
586 return false;
587 return pre_expr_d::equal (ve1->e, ve2->e);
590 /* The phi_translate_table caches phi translations for a given
591 expression and predecessor. */
592 static hash_table<expr_pred_trans_d> *phi_translate_table;
594 /* Add the tuple mapping from {expression E, basic block PRED} to
595 the phi translation table and return whether it pre-existed. */
597 static inline bool
598 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
600 expr_pred_trans_t *slot;
601 expr_pred_trans_d tem;
602 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
603 pred->index);
604 tem.e = e;
605 tem.pred = pred;
606 tem.hashcode = hash;
607 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
608 if (*slot)
610 *entry = *slot;
611 return true;
614 *entry = *slot = XNEW (struct expr_pred_trans_d);
615 (*entry)->e = e;
616 (*entry)->pred = pred;
617 (*entry)->hashcode = hash;
618 return false;
622 /* Add expression E to the expression set of value id V. */
624 static void
625 add_to_value (unsigned int v, pre_expr e)
627 bitmap set;
629 gcc_checking_assert (get_expr_value_id (e) == v);
631 if (v >= value_expressions.length ())
633 value_expressions.safe_grow_cleared (v + 1);
636 set = value_expressions[v];
637 if (!set)
639 set = BITMAP_ALLOC (&grand_bitmap_obstack);
640 value_expressions[v] = set;
643 bitmap_set_bit (set, get_or_alloc_expression_id (e));
646 /* Create a new bitmap set and return it. */
648 static bitmap_set_t
649 bitmap_set_new (void)
651 bitmap_set_t ret = bitmap_set_pool.allocate ();
652 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
653 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
654 return ret;
657 /* Return the value id for a PRE expression EXPR. */
659 static unsigned int
660 get_expr_value_id (pre_expr expr)
662 unsigned int id;
663 switch (expr->kind)
665 case CONSTANT:
666 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
667 break;
668 case NAME:
669 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
670 break;
671 case NARY:
672 id = PRE_EXPR_NARY (expr)->value_id;
673 break;
674 case REFERENCE:
675 id = PRE_EXPR_REFERENCE (expr)->value_id;
676 break;
677 default:
678 gcc_unreachable ();
680 /* ??? We cannot assert that expr has a value-id (it can be 0), because
681 we assign value-ids only to expressions that have a result
682 in set_hashtable_value_ids. */
683 return id;
686 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
688 static tree
689 sccvn_valnum_from_value_id (unsigned int val)
691 bitmap_iterator bi;
692 unsigned int i;
693 bitmap exprset = value_expressions[val];
694 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
696 pre_expr vexpr = expression_for_id (i);
697 if (vexpr->kind == NAME)
698 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
699 else if (vexpr->kind == CONSTANT)
700 return PRE_EXPR_CONSTANT (vexpr);
702 return NULL_TREE;
705 /* Insert an expression EXPR into a bitmapped set. */
707 static void
708 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
710 unsigned int val = get_expr_value_id (expr);
711 if (! value_id_constant_p (val))
713 /* Note this is the only function causing multiple expressions
714 for the same value to appear in a set. This is needed for
715 TMP_GEN, PHI_GEN and NEW_SETs. */
716 bitmap_set_bit (&set->values, val);
717 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
721 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
723 static void
724 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
726 bitmap_copy (&dest->expressions, &orig->expressions);
727 bitmap_copy (&dest->values, &orig->values);
731 /* Free memory used up by SET. */
732 static void
733 bitmap_set_free (bitmap_set_t set)
735 bitmap_clear (&set->expressions);
736 bitmap_clear (&set->values);
740 /* Generate a topologically-ordered array of bitmap set SET. */
742 static vec<pre_expr>
743 sorted_array_from_bitmap_set (bitmap_set_t set)
745 unsigned int i, j;
746 bitmap_iterator bi, bj;
747 vec<pre_expr> result;
749 /* Pre-allocate enough space for the array. */
750 result.create (bitmap_count_bits (&set->expressions));
752 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
754 /* The number of expressions having a given value is usually
755 relatively small. Thus, rather than making a vector of all
756 the expressions and sorting it by value-id, we walk the values
757 and check in the reverse mapping that tells us what expressions
758 have a given value, to filter those in our set. As a result,
759 the expressions are inserted in value-id order, which means
760 topological order.
762 If this is somehow a significant loss for some cases, we can
763 choose which set to walk based on the set size. */
764 bitmap exprset = value_expressions[i];
765 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
767 if (bitmap_bit_p (&set->expressions, j))
768 result.quick_push (expression_for_id (j));
772 return result;
775 /* Subtract all expressions contained in ORIG from DEST and return the result as a new set. */
777 static bitmap_set_t
778 bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
780 bitmap_set_t result = bitmap_set_new ();
781 bitmap_iterator bi;
782 unsigned int i;
784 bitmap_and_compl (&result->expressions, &dest->expressions,
785 &orig->expressions);
787 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
789 pre_expr expr = expression_for_id (i);
790 unsigned int value_id = get_expr_value_id (expr);
791 bitmap_set_bit (&result->values, value_id);
794 return result;
797 /* Subtract all values in bitmap set B from bitmap set A. */
799 static void
800 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
802 unsigned int i;
803 bitmap_iterator bi;
804 unsigned to_remove = -1U;
805 bitmap_and_compl_into (&a->values, &b->values);
806 FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
808 if (to_remove != -1U)
810 bitmap_clear_bit (&a->expressions, to_remove);
811 to_remove = -1U;
813 pre_expr expr = expression_for_id (i);
814 if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
815 to_remove = i;
817 if (to_remove != -1U)
818 bitmap_clear_bit (&a->expressions, to_remove);
822 /* Return true if bitmapped set SET contains the value VALUE_ID. */
824 static bool
825 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
827 if (value_id_constant_p (value_id))
828 return true;
830 return bitmap_bit_p (&set->values, value_id);
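/* Return true if bitmapped set SET contains the expression EXPR.  */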
833 static inline bool
834 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
836 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
839 /* Return true if two bitmap sets are equal. */
841 static bool
842 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
844 return bitmap_equal_p (&a->values, &b->values);
847 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
848 and add it otherwise. */
850 static void
851 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
853 unsigned int val = get_expr_value_id (expr);
854 if (value_id_constant_p (val))
855 return;
857 if (bitmap_set_contains_value (set, val))
859 /* The number of expressions having a given value is usually
860 significantly less than the total number of expressions in SET.
861 Thus, rather than check, for each expression in SET, whether it
862 has the value LOOKFOR, we walk the reverse mapping that tells us
863 what expressions have a given value, and see if any of those
864 expressions are in our set. For large testcases, this is about
865 5-10x faster than walking the bitmap. If this is somehow a
866 significant loss for some cases, we can choose which set to walk
867 based on the set size. */
868 unsigned int i;
869 bitmap_iterator bi;
870 bitmap exprset = value_expressions[val];
871 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
873 if (bitmap_clear_bit (&set->expressions, i))
875 bitmap_set_bit (&set->expressions, get_expression_id (expr));
876 return;
879 gcc_unreachable ();
881 else
882 bitmap_insert_into_set (set, expr);
885 /* Insert EXPR into SET if EXPR's value is not already present in
886 SET. */
888 static void
889 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
891 unsigned int val = get_expr_value_id (expr);
893 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
895 /* Constant values are always considered to be part of the set. */
896 if (value_id_constant_p (val))
897 return;
899 /* If the value membership changed, add the expression. */
900 if (bitmap_set_bit (&set->values, val))
901 bitmap_set_bit (&set->expressions, expr->id);
904 /* Print out EXPR to OUTFILE. */
906 static void
907 print_pre_expr (FILE *outfile, const pre_expr expr)
909 if (! expr)
911 fprintf (outfile, "NULL");
912 return;
914 switch (expr->kind)
916 case CONSTANT:
917 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
918 break;
919 case NAME:
920 print_generic_expr (outfile, PRE_EXPR_NAME (expr));
921 break;
922 case NARY:
924 unsigned int i;
925 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
926 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
927 for (i = 0; i < nary->length; i++)
929 print_generic_expr (outfile, nary->op[i]);
930 if (i != (unsigned) nary->length - 1)
931 fprintf (outfile, ",");
933 fprintf (outfile, "}");
935 break;
937 case REFERENCE:
939 vn_reference_op_t vro;
940 unsigned int i;
941 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
942 fprintf (outfile, "{");
943 for (i = 0;
944 ref->operands.iterate (i, &vro);
945 i++)
947 bool closebrace = false;
948 if (vro->opcode != SSA_NAME
949 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
951 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
952 if (vro->op0)
954 fprintf (outfile, "<");
955 closebrace = true;
958 if (vro->op0)
960 print_generic_expr (outfile, vro->op0);
961 if (vro->op1)
963 fprintf (outfile, ",");
964 print_generic_expr (outfile, vro->op1);
966 if (vro->op2)
968 fprintf (outfile, ",");
969 print_generic_expr (outfile, vro->op2);
972 if (closebrace)
973 fprintf (outfile, ">");
974 if (i != ref->operands.length () - 1)
975 fprintf (outfile, ",");
977 fprintf (outfile, "}");
978 if (ref->vuse)
980 fprintf (outfile, "@");
981 print_generic_expr (outfile, ref->vuse);
984 break;
987 void debug_pre_expr (pre_expr);
989 /* Like print_pre_expr but always prints to stderr. */
990 DEBUG_FUNCTION void
991 debug_pre_expr (pre_expr e)
993 print_pre_expr (stderr, e);
994 fprintf (stderr, "\n");
997 /* Print out SET to OUTFILE. */
999 static void
1000 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1001 const char *setname, int blockindex)
1003 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1004 if (set)
1006 bool first = true;
1007 unsigned i;
1008 bitmap_iterator bi;
1010 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1012 const pre_expr expr = expression_for_id (i);
1014 if (!first)
1015 fprintf (outfile, ", ");
1016 first = false;
1017 print_pre_expr (outfile, expr);
1019 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1022 fprintf (outfile, " }\n");
1025 void debug_bitmap_set (bitmap_set_t);
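/* Print SET to stderr; meant to be called from the debugger.  */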
1027 DEBUG_FUNCTION void
1028 debug_bitmap_set (bitmap_set_t set)
1030 print_bitmap_set (stderr, set, "debug", 0);
1033 void debug_bitmap_sets_for (basic_block);
1035 DEBUG_FUNCTION void
1036 debug_bitmap_sets_for (basic_block bb)
1038 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1039 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1040 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1041 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1042 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1043 if (do_partial_partial)
1044 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1045 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1048 /* Print out the expressions that have VAL to OUTFILE. */
1050 static void
1051 print_value_expressions (FILE *outfile, unsigned int val)
1053 bitmap set = value_expressions[val];
1054 if (set)
1056 bitmap_set x;
1057 char s[10];
1058 sprintf (s, "%04d", val);
1059 x.expressions = *set;
1060 print_bitmap_set (outfile, &x, s, 0);
1065 DEBUG_FUNCTION void
1066 debug_value_expressions (unsigned int val)
1068 print_value_expressions (stderr, val);
1071 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1072 represent it. */
1074 static pre_expr
1075 get_or_alloc_expr_for_constant (tree constant)
1077 unsigned int result_id;
1078 unsigned int value_id;
1079 struct pre_expr_d expr;
1080 pre_expr newexpr;
1082 expr.kind = CONSTANT;
1083 PRE_EXPR_CONSTANT (&expr) = constant;
1084 result_id = lookup_expression_id (&expr);
1085 if (result_id != 0)
1086 return expression_for_id (result_id);
1088 newexpr = pre_expr_pool.allocate ();
1089 newexpr->kind = CONSTANT;
1090 PRE_EXPR_CONSTANT (newexpr) = constant;
1091 alloc_expression_id (newexpr);
1092 value_id = get_or_alloc_constant_value_id (constant);
1093 add_to_value (value_id, newexpr);
1094 return newexpr;
1097 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1098 Currently only supports constants and SSA_NAMES. */
1099 static pre_expr
1100 get_or_alloc_expr_for (tree t)
1102 if (TREE_CODE (t) == SSA_NAME)
1103 return get_or_alloc_expr_for_name (t);
1104 else if (is_gimple_min_invariant (t))
1105 return get_or_alloc_expr_for_constant (t);
1106 gcc_unreachable ();
1109 /* Return the folded version of E if E, when folded, is a gimple
1110 min_invariant or an SSA name. Otherwise, return E. */
1112 static pre_expr
1113 fully_constant_expression (pre_expr e)
1115 switch (e->kind)
1117 case CONSTANT:
1118 return e;
1119 case NARY:
1121 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1122 tree res = vn_nary_simplify (nary);
1123 if (!res)
1124 return e;
1125 if (is_gimple_min_invariant (res))
1126 return get_or_alloc_expr_for_constant (res);
1127 if (TREE_CODE (res) == SSA_NAME)
1128 return get_or_alloc_expr_for_name (res);
1129 return e;
1131 case REFERENCE:
1133 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1134 tree folded;
1135 if ((folded = fully_constant_vn_reference_p (ref)))
1136 return get_or_alloc_expr_for_constant (folded);
1137 return e;
1139 default:
1140 return e;
1142 return e;
1145 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1146 it has the value it would have in BLOCK. Set *SAME_VALID to false
1147 in case the new vuse may change the value id of the OPERANDS. */
1149 static tree
1150 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1151 alias_set_type set, tree type, tree vuse,
1152 basic_block phiblock,
1153 basic_block block, bool *same_valid)
1155 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1156 ao_ref ref;
1157 edge e = NULL;
1158 bool use_oracle;
1160 *same_valid = true;
1162 if (gimple_bb (phi) != phiblock)
1163 return vuse;
1165 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1167 /* Use the alias-oracle to find either the PHI node in this block,
1168 the first VUSE used in this block that is equivalent to vuse or
1169 the first VUSE whose definition in this block kills the value. */
1170 if (gimple_code (phi) == GIMPLE_PHI)
1171 e = find_edge (block, phiblock);
1172 else if (use_oracle)
1173 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1175 vuse = gimple_vuse (phi);
1176 phi = SSA_NAME_DEF_STMT (vuse);
1177 if (gimple_bb (phi) != phiblock)
1178 return vuse;
1179 if (gimple_code (phi) == GIMPLE_PHI)
1181 e = find_edge (block, phiblock);
1182 break;
1185 else
1186 return NULL_TREE;
1188 if (e)
1190 if (use_oracle)
1192 bitmap visited = NULL;
1193 unsigned int cnt;
1194 /* Try to find a vuse that dominates this phi node by skipping
1195 non-clobbering statements. */
1196 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1197 NULL, NULL);
1198 if (visited)
1199 BITMAP_FREE (visited);
1201 else
1202 vuse = NULL_TREE;
1203 if (!vuse)
1205 /* If we didn't find any, the value ID can't stay the same,
1206 but return the translated vuse. */
1207 *same_valid = false;
1208 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1210 /* ??? We would like to return vuse here as this is the canonical
1211 upmost vdef that this reference is associated with. But during
1212 insertion of the references into the hash tables we only ever
1213 directly insert with their direct gimple_vuse, hence returning
1214 something else would make us not find the other expression. */
1215 return PHI_ARG_DEF (phi, e->dest_idx);
1218 return NULL_TREE;
1221 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1222 SET2 *or* SET3. This is used to avoid making a set consisting of the union
1223 of PA_IN and ANTIC_IN during insert and phi-translation. */
1225 static inline pre_expr
1226 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
1227 bitmap_set_t set3 = NULL)
1229 pre_expr result;
1231 result = bitmap_find_leader (set1, val);
1232 if (!result && set2)
1233 result = bitmap_find_leader (set2, val);
1234 if (!result && set3)
1235 result = bitmap_find_leader (set3, val);
1236 return result;
1239 /* Get the tree type for our PRE expression E. */
1241 static tree
1242 get_expr_type (const pre_expr e)
1244 switch (e->kind)
1246 case NAME:
1247 return TREE_TYPE (PRE_EXPR_NAME (e));
1248 case CONSTANT:
1249 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1250 case REFERENCE:
1251 return PRE_EXPR_REFERENCE (e)->type;
1252 case NARY:
1253 return PRE_EXPR_NARY (e)->type;
1255 gcc_unreachable ();
1258 /* Get a representative SSA_NAME for a given expression that is available in B.
1259 Since all of our sub-expressions are treated as values, we require
1260 them to be SSA_NAME's for simplicity.
1261 Prior versions of GVNPRE used to use "value handles" here, so that
1262 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1263 either case, the operands are really values (IE we do not expect
1264 them to be usable without finding leaders). */
1266 static tree
1267 get_representative_for (const pre_expr e, basic_block b = NULL)
1269 tree name, valnum = NULL_TREE;
1270 unsigned int value_id = get_expr_value_id (e);
1272 switch (e->kind)
1274 case NAME:
1275 return VN_INFO (PRE_EXPR_NAME (e))->valnum;
1276 case CONSTANT:
1277 return PRE_EXPR_CONSTANT (e);
1278 case NARY:
1279 case REFERENCE:
1281 /* Go through all of the expressions representing this value
1282 and pick out an SSA_NAME. */
1283 unsigned int i;
1284 bitmap_iterator bi;
1285 bitmap exprs = value_expressions[value_id];
1286 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1288 pre_expr rep = expression_for_id (i);
1289 if (rep->kind == NAME)
1291 tree name = PRE_EXPR_NAME (rep);
1292 valnum = VN_INFO (name)->valnum;
1293 gimple *def = SSA_NAME_DEF_STMT (name);
1294 /* We have to return either a new representative or one
1295 that can be used for expression simplification and thus
1296 is available in B. */
1297 if (! b
1298 || gimple_nop_p (def)
1299 || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
1300 return name;
1302 else if (rep->kind == CONSTANT)
1303 return PRE_EXPR_CONSTANT (rep);
1306 break;
1309 /* If we reached here we couldn't find an SSA_NAME. This can
1310 happen when we've discovered a value that has never appeared in
1311 the program as set to an SSA_NAME, as the result of phi translation.
1312 Create one here.
1313 ??? We should be able to re-use this when we insert the statement
1314 to compute it. */
1315 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1316 VN_INFO_GET (name)->value_id = value_id;
1317 VN_INFO (name)->valnum = valnum ? valnum : name;
1318 /* ??? For now mark this SSA name for release by SCCVN. */
1319 VN_INFO (name)->needs_insertion = true;
1320 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1321 if (dump_file && (dump_flags & TDF_DETAILS))
1323 fprintf (dump_file, "Created SSA_NAME representative ");
1324 print_generic_expr (dump_file, name);
1325 fprintf (dump_file, " for expression:");
1326 print_pre_expr (dump_file, e);
1327 fprintf (dump_file, " (%04d)\n", value_id);
1330 return name;
1334 static pre_expr
1335 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e);
1337 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1338 the phis in PRED. Return NULL if we can't find a leader for each part
1339 of the translated expression. */
1341 static pre_expr
1342 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
1344 basic_block pred = e->src;
1345 basic_block phiblock = e->dest;
1346 switch (expr->kind)
1348 case NARY:
1350 unsigned int i;
1351 bool changed = false;
1352 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1353 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1354 sizeof_vn_nary_op (nary->length));
1355 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1357 for (i = 0; i < newnary->length; i++)
1359 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1360 continue;
1361 else
1363 pre_expr leader, result;
1364 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1365 leader = find_leader_in_sets (op_val_id, set1, set2);
1366 result = phi_translate (leader, set1, set2, e);
1367 if (result && result != leader)
1368 /* Force a leader as well, since we are simplifying this
1369 expression. */
1370 newnary->op[i] = get_representative_for (result, pred);
1371 else if (!result)
1372 return NULL;
1374 changed |= newnary->op[i] != nary->op[i];
1377 if (changed)
1379 pre_expr constant;
1380 unsigned int new_val_id;
1382 PRE_EXPR_NARY (expr) = newnary;
1383 constant = fully_constant_expression (expr);
1384 PRE_EXPR_NARY (expr) = nary;
1385 if (constant != expr)
1387 /* For non-CONSTANTs we have to make sure we can eventually
1388 insert the expression, which means we need to have a
1389 leader for it. */
1390 if (constant->kind != CONSTANT)
1392 /* Do not allow simplifications to non-constants over
1393 backedges, as this will likely result in a loop PHI node
1394 being inserted and increased register pressure.
1395 See PR77498 - this avoids doing predcoms work in
1396 a less efficient way. */
1397 if (e->flags & EDGE_DFS_BACK)
1399 else
1401 unsigned value_id = get_expr_value_id (constant);
1402 constant = find_leader_in_sets (value_id, set1, set2,
1403 AVAIL_OUT (pred));
1404 if (constant)
1405 return constant;
1408 else
1409 return constant;
1412 /* vn_nary_* do not valueize operands. */
1413 for (i = 0; i < newnary->length; ++i)
1414 if (TREE_CODE (newnary->op[i]) == SSA_NAME)
1415 newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
1416 tree result = vn_nary_op_lookup_pieces (newnary->length,
1417 newnary->opcode,
1418 newnary->type,
1419 &newnary->op[0],
1420 &nary);
1421 if (result && is_gimple_min_invariant (result))
1422 return get_or_alloc_expr_for_constant (result);
1424 expr = pre_expr_pool.allocate ();
1425 expr->kind = NARY;
1426 expr->id = 0;
1427 if (nary)
1429 PRE_EXPR_NARY (expr) = nary;
1430 new_val_id = nary->value_id;
1431 get_or_alloc_expression_id (expr);
1433 else
1435 new_val_id = get_next_value_id ();
1436 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1437 nary = vn_nary_op_insert_pieces (newnary->length,
1438 newnary->opcode,
1439 newnary->type,
1440 &newnary->op[0],
1441 result, new_val_id);
1442 PRE_EXPR_NARY (expr) = nary;
1443 get_or_alloc_expression_id (expr);
1445 add_to_value (new_val_id, expr);
1447 return expr;
1449 break;
1451 case REFERENCE:
1453 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1454 vec<vn_reference_op_s> operands = ref->operands;
1455 tree vuse = ref->vuse;
1456 tree newvuse = vuse;
1457 vec<vn_reference_op_s> newoperands = vNULL;
1458 bool changed = false, same_valid = true;
1459 unsigned int i, n;
1460 vn_reference_op_t operand;
1461 vn_reference_t newref;
1463 for (i = 0; operands.iterate (i, &operand); i++)
1465 pre_expr opresult;
1466 pre_expr leader;
1467 tree op[3];
1468 tree type = operand->type;
1469 vn_reference_op_s newop = *operand;
1470 op[0] = operand->op0;
1471 op[1] = operand->op1;
1472 op[2] = operand->op2;
1473 for (n = 0; n < 3; ++n)
1475 unsigned int op_val_id;
1476 if (!op[n])
1477 continue;
1478 if (TREE_CODE (op[n]) != SSA_NAME)
1480 /* We can't possibly insert these. */
1481 if (n != 0
1482 && !is_gimple_min_invariant (op[n]))
1483 break;
1484 continue;
1486 op_val_id = VN_INFO (op[n])->value_id;
1487 leader = find_leader_in_sets (op_val_id, set1, set2);
1488 opresult = phi_translate (leader, set1, set2, e);
1489 if (opresult && opresult != leader)
1491 tree name = get_representative_for (opresult);
1492 changed |= name != op[n];
1493 op[n] = name;
1495 else if (!opresult)
1496 break;
1498 if (n != 3)
1500 newoperands.release ();
1501 return NULL;
1503 if (!changed)
1504 continue;
1505 if (!newoperands.exists ())
1506 newoperands = operands.copy ();
1507 /* We may have changed from an SSA_NAME to a constant. */
1508 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1509 newop.opcode = TREE_CODE (op[0]);
1510 newop.type = type;
1511 newop.op0 = op[0];
1512 newop.op1 = op[1];
1513 newop.op2 = op[2];
1514 newoperands[i] = newop;
1516 gcc_checking_assert (i == operands.length ());
1518 if (vuse)
1520 newvuse = translate_vuse_through_block (newoperands.exists ()
1521 ? newoperands : operands,
1522 ref->set, ref->type,
1523 vuse, phiblock, pred,
1524 &same_valid);
1525 if (newvuse == NULL_TREE)
1527 newoperands.release ();
1528 return NULL;
1532 if (changed || newvuse != vuse)
1534 unsigned int new_val_id;
1536 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1537 ref->type,
1538 newoperands.exists ()
1539 ? newoperands : operands,
1540 &newref, VN_WALK);
1541 if (result)
1542 newoperands.release ();
1544 /* We can always insert constants, so if we have a partially
1545 redundant constant load of another type try to translate it
1546 to a constant of appropriate type. */
1547 if (result && is_gimple_min_invariant (result))
1549 tree tem = result;
1550 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1552 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1553 if (tem && !is_gimple_min_invariant (tem))
1554 tem = NULL_TREE;
1556 if (tem)
1557 return get_or_alloc_expr_for_constant (tem);
1560 /* If we'd have to convert things we would need to validate
1561 if we can insert the translated expression. So fail
1562 here for now - we cannot insert an alias with a different
1563 type in the VN tables either, as that would assert. */
1564 if (result
1565 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1566 return NULL;
1567 else if (!result && newref
1568 && !useless_type_conversion_p (ref->type, newref->type))
1570 newoperands.release ();
1571 return NULL;
1574 expr = pre_expr_pool.allocate ();
1575 expr->kind = REFERENCE;
1576 expr->id = 0;
1578 if (newref)
1579 new_val_id = newref->value_id;
1580 else
1582 if (changed || !same_valid)
1584 new_val_id = get_next_value_id ();
1585 value_expressions.safe_grow_cleared
1586 (get_max_value_id () + 1);
1588 else
1589 new_val_id = ref->value_id;
1590 if (!newoperands.exists ())
1591 newoperands = operands.copy ();
1592 newref = vn_reference_insert_pieces (newvuse, ref->set,
1593 ref->type,
1594 newoperands,
1595 result, new_val_id);
1596 newoperands = vNULL;
1598 PRE_EXPR_REFERENCE (expr) = newref;
1599 get_or_alloc_expression_id (expr);
1600 add_to_value (new_val_id, expr);
1602 newoperands.release ();
1603 return expr;
1605 break;
1607 case NAME:
1609 tree name = PRE_EXPR_NAME (expr);
1610 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1611 /* If the SSA name is defined by a PHI node in this block,
1612 translate it. */
1613 if (gimple_code (def_stmt) == GIMPLE_PHI
1614 && gimple_bb (def_stmt) == phiblock)
1616 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1618 /* Handle constant. */
1619 if (is_gimple_min_invariant (def))
1620 return get_or_alloc_expr_for_constant (def);
1622 return get_or_alloc_expr_for_name (def);
1624 /* Otherwise return it unchanged - it will get removed if its
1625 value is not available in PRED's AVAIL_OUT set of expressions
1626 by the subtraction of TMP_GEN. */
1627 return expr;
1630 default:
1631 gcc_unreachable ();
1635 /* Wrapper around phi_translate_1 providing caching functionality. */
1637 static pre_expr
1638 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
1640 expr_pred_trans_t slot = NULL;
1641 pre_expr phitrans;
1643 if (!expr)
1644 return NULL;
1646 /* Constants contain no values that need translation. */
1647 if (expr->kind == CONSTANT)
1648 return expr;
1650 if (value_id_constant_p (get_expr_value_id (expr)))
1651 return expr;
1653 /* Don't add translations of NAMEs as those are cheap to translate. */
1654 if (expr->kind != NAME)
1656 if (phi_trans_add (&slot, expr, e->src))
1657 return slot->v;
1658 /* Store NULL for the value we want to return in the case of
1659 recursing. */
1660 slot->v = NULL;
1663 /* Translate. */
1664 phitrans = phi_translate_1 (expr, set1, set2, e);
1666 if (slot)
1668 if (phitrans)
1669 slot->v = phitrans;
1670 else
1671 /* Remove failed translations again, as they cause the insert
1672 iteration to not pick up new opportunities reliably. */
1673 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1676 return phitrans;
1680 /* For each expression in SET, translate the values through phi nodes
1681 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1682 expressions in DEST. */
1684 static void
1685 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e)
1687 vec<pre_expr> exprs;
1688 pre_expr expr;
1689 int i;
1691 if (gimple_seq_empty_p (phi_nodes (e->dest)))
1693 bitmap_set_copy (dest, set);
1694 return;
1697 exprs = sorted_array_from_bitmap_set (set);
1698 FOR_EACH_VEC_ELT (exprs, i, expr)
1700 pre_expr translated;
1701 translated = phi_translate (expr, set, NULL, e);
1702 if (!translated)
1703 continue;
1705 bitmap_insert_into_set (dest, translated);
1707 exprs.release ();
1710 /* Find the leader for a value (i.e., the name representing that
1711 value) in a given set, and return it. Return NULL if no leader
1712 is found. */
1714 static pre_expr
1715 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1717 if (value_id_constant_p (val))
1719 unsigned int i;
1720 bitmap_iterator bi;
1721 bitmap exprset = value_expressions[val];
1723 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1725 pre_expr expr = expression_for_id (i);
1726 if (expr->kind == CONSTANT)
1727 return expr;
1730 if (bitmap_set_contains_value (set, val))
1732 /* Rather than walk the entire bitmap of expressions, and see
1733 whether any of them has the value we are looking for, we look
1734 at the reverse mapping, which tells us the set of expressions
1735 that have a given value (IE value->expressions with that
1736 value) and see if any of those expressions are in our set.
1737 The number of expressions per value is usually significantly
1738 less than the number of expressions in the set. In fact, for
1739 large testcases, doing it this way is roughly 5-10x faster
1740 than walking the bitmap.
1741 If this is somehow a significant loss for some cases, we can
1742 choose which set to walk based on which set is smaller. */
1743 unsigned int i;
1744 bitmap_iterator bi;
1745 bitmap exprset = value_expressions[val];
1747 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1748 return expression_for_id (i);
1750 return NULL;
1753 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1754 BLOCK by seeing if it is not killed in the block. Note that we are
1755 only determining whether there is a store that kills it. Because
1756 of the order in which clean iterates over values, we are guaranteed
1757 that altered operands will have caused us to be eliminated from the
1758 ANTIC_IN set already. */
1760 static bool
1761 value_dies_in_block_x (pre_expr expr, basic_block block)
1763 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1764 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1765 gimple *def;
1766 gimple_stmt_iterator gsi;
1767 unsigned id = get_expression_id (expr);
1768 bool res = false;
1769 ao_ref ref;
1771 if (!vuse)
1772 return false;
1774 /* Lookup a previously calculated result. */
1775 if (EXPR_DIES (block)
1776 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1777 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1779 /* A memory expression {e, VUSE} dies in the block if there is a
1780 statement that may clobber e. If, starting the statement walk from
1781 the top of the basic block, a statement uses VUSE, there can be no kill
1782 in between that use and the original statement that loaded {e, VUSE},
1783 so we can stop walking. */
1784 ref.base = NULL_TREE;
1785 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1787 tree def_vuse, def_vdef;
1788 def = gsi_stmt (gsi);
1789 def_vuse = gimple_vuse (def);
1790 def_vdef = gimple_vdef (def);
1792 /* Not a memory statement. */
1793 if (!def_vuse)
1794 continue;
1796 /* Not a may-def. */
1797 if (!def_vdef)
1799 /* A load with the same VUSE, we're done. */
1800 if (def_vuse == vuse)
1801 break;
1803 continue;
1806 /* Init ref only if we really need it. */
1807 if (ref.base == NULL_TREE
1808 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1809 refx->operands))
1811 res = true;
1812 break;
1814 /* If the statement may clobber expr, it dies. */
1815 if (stmt_may_clobber_ref_p_1 (def, &ref))
1817 res = true;
1818 break;
1822 /* Remember the result. */
1823 if (!EXPR_DIES (block))
1824 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1825 bitmap_set_bit (EXPR_DIES (block), id * 2);
1826 if (res)
1827 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1829 return res;
1833 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1834 contains its value-id. */
1836 static bool
1837 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1839 if (op && TREE_CODE (op) == SSA_NAME)
1841 unsigned int value_id = VN_INFO (op)->value_id;
1842 if (!(bitmap_set_contains_value (set1, value_id)
1843 || (set2 && bitmap_set_contains_value (set2, value_id))))
1844 return false;
1846 return true;
1849 /* Determine if the expression EXPR is valid in SET1 U SET2.
1850 ONLY SET2 CAN BE NULL.
1851 This means that we have a leader for each part of the expression
1852 (if it consists of values), or the expression is an SSA_NAME.
1853 For loads/calls, we also see if the vuse is killed in this block. */
1855 static bool
1856 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1858 switch (expr->kind)
1860 case NAME:
1861 /* By construction all NAMEs are available. Non-available
1862 NAMEs are removed by subtracting TMP_GEN from the sets. */
1863 return true;
1864 case NARY:
1866 unsigned int i;
1867 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1868 for (i = 0; i < nary->length; i++)
1869 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1870 return false;
1871 return true;
1873 break;
1874 case REFERENCE:
1876 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1877 vn_reference_op_t vro;
1878 unsigned int i;
1880 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1882 if (!op_valid_in_sets (set1, set2, vro->op0)
1883 || !op_valid_in_sets (set1, set2, vro->op1)
1884 || !op_valid_in_sets (set1, set2, vro->op2))
1885 return false;
1887 return true;
1889 default:
1890 gcc_unreachable ();
1894 /* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
1895 This means expressions that are made up of values we have no leaders for
1896 in SET1 or SET2. */
1898 static void
1899 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
1901 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1902 pre_expr expr;
1903 int i;
1905 FOR_EACH_VEC_ELT (exprs, i, expr)
1907 if (!valid_in_sets (set1, set2, expr))
1909 unsigned int val = get_expr_value_id (expr);
1910 bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
1911 /* We are entered with possibly multiple expressions for a value,
1912 so before removing a value from the set see if there's an
1913 expression left for it. */
1914 if (! bitmap_find_leader (set1, val))
1915 bitmap_clear_bit (&set1->values, val);
1918 exprs.release ();
1921 /* Clean the set of expressions that are no longer valid in SET because
1922 they are clobbered in BLOCK or because they trap and may not be executed. */
1924 static void
1925 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1927 bitmap_iterator bi;
1928 unsigned i;
1929 unsigned to_remove = -1U;
1930 bool any_removed = false;
1932 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1934 /* Remove queued expr. */
1935 if (to_remove != -1U)
1937 bitmap_clear_bit (&set->expressions, to_remove);
1938 any_removed = true;
1939 to_remove = -1U;
1942 pre_expr expr = expression_for_id (i);
1943 if (expr->kind == REFERENCE)
1945 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1946 if (ref->vuse)
1948 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1949 if (!gimple_nop_p (def_stmt)
1950 && ((gimple_bb (def_stmt) != block
1951 && !dominated_by_p (CDI_DOMINATORS,
1952 block, gimple_bb (def_stmt)))
1953 || (gimple_bb (def_stmt) == block
1954 && value_dies_in_block_x (expr, block))))
1955 to_remove = i;
1958 else if (expr->kind == NARY)
1960 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1961 /* If the NARY may trap make sure the block does not contain
1962 a possible exit point.
1963 ??? This is overly conservative if we translate AVAIL_OUT
1964 as the available expression might be after the exit point. */
1965 if (BB_MAY_NOTRETURN (block)
1966 && vn_nary_may_trap (nary))
1967 to_remove = i;
1971 /* Remove queued expr. */
1972 if (to_remove != -1U)
1974 bitmap_clear_bit (&set->expressions, to_remove);
1975 any_removed = true;
1978 /* Above we only removed expressions, now clean the set of values
1979 which no longer have any corresponding expression. We cannot
1980 clear the value at the time we remove an expression since there
1981 may be multiple expressions per value.
1982 If we queued the values possibly to be removed we could use
1983 bitmap_find_leader to see whether an expression still remains
1984 for each. For some ratios of to-be-removed values to the number
1985 of values/expressions in the set this might be faster than
1986 rebuilding the value set. */
1987 if (any_removed)
1989 bitmap_clear (&set->values);
1990 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1992 pre_expr expr = expression_for_id (i);
1993 unsigned int value_id = get_expr_value_id (expr);
1994 bitmap_set_bit (&set->values, value_id);
1999 static sbitmap has_abnormal_preds;
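/* A minimal sketch (illustration only, not part of the pass) of the
   ANTIC fixpoint specified below, on plain 64-value bitsets and
   ignoring PHI translation, abnormal edges and the value/expression
   split; all names here are invented stand-ins.  Assumes <stdint.h>
   and <stdbool.h>.  */
#if 0
static void
antic_fixpoint_sketch (uint64_t antic_in[], const uint64_t exp_gen[],
                       const uint64_t tmp_gen[], const int succ[][2],
                       int nblocks)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Visit blocks in an order that tends to see successors first.  */
      for (int b = nblocks - 1; b >= 0; b--)
        {
          uint64_t antic_out;
          if (succ[b][0] == -1)              /* No successors.  */
            antic_out = 0;
          else if (succ[b][1] == -1)         /* Single successor.  */
            antic_out = antic_in[succ[b][0]];
          else                               /* Intersection over successors.  */
            antic_out = antic_in[succ[b][0]] & antic_in[succ[b][1]];
          /* ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN.  */
          uint64_t new_in = (antic_out | exp_gen[b]) & ~tmp_gen[b];
          if (new_in != antic_in[b])
            {
              antic_in[b] = new_in;
              changed = true;
            }
        }
    }
}
#endif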
2001 /* Compute the ANTIC set for BLOCK.
2003 If succs(BLOCK) > 1 then
2004 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2005 else if succs(BLOCK) == 1 then
2006 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2008 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2010 Note that clean() is deferred until after the iteration. */
2012 static bool
2013 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2015 bitmap_set_t S, old, ANTIC_OUT;
2016 edge e;
2017 edge_iterator ei;
2019 bool was_visited = BB_VISITED (block);
2020 bool changed = ! BB_VISITED (block);
2021 BB_VISITED (block) = 1;
2022 old = ANTIC_OUT = S = NULL;
2024 /* If any edges from predecessors are abnormal, antic_in is empty,
2025 so do nothing. */
2026 if (block_has_abnormal_pred_edge)
2027 goto maybe_dump_sets;
2029 old = ANTIC_IN (block);
2030 ANTIC_OUT = bitmap_set_new ();
2032 /* If the block has no successors, ANTIC_OUT is empty. */
2033 if (EDGE_COUNT (block->succs) == 0)
2035 /* If we have one successor, we could have some phi nodes to
2036 translate through. */
2037 else if (single_succ_p (block))
2039 e = single_succ_edge (block);
2040 gcc_assert (BB_VISITED (e->dest));
2041 BB_VISITED_WITH_VISITED_SUCCS (block)
2042 = BB_VISITED_WITH_VISITED_SUCCS (e->dest);
2043 phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e);
2045 /* If we have multiple successors, we take the intersection of all of
2046 them. Note that in the case of loop exit phi nodes, we may have
2047 phis to translate through. */
2048 else
2050 size_t i;
2051 edge first = NULL;
2053 BB_VISITED_WITH_VISITED_SUCCS (block) = true;
2054 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2055 FOR_EACH_EDGE (e, ei, block->succs)
2057 if (!first
2058 && BB_VISITED (e->dest))
2059 first = e;
2060 else if (BB_VISITED (e->dest))
2061 worklist.quick_push (e);
2062 else
2064 /* Unvisited successors get their ANTIC_IN replaced by the
2065 maximal set to arrive at a maximum ANTIC_IN solution.
2066 We can ignore them in the intersection operation and thus
2067 need not explicitly represent that maximum solution. */
2068 if (dump_file && (dump_flags & TDF_DETAILS))
2069 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2070 e->src->index, e->dest->index);
2072 BB_VISITED_WITH_VISITED_SUCCS (block)
2073 &= BB_VISITED_WITH_VISITED_SUCCS (e->dest);
2076 /* With multiple successors we must already have visited one,
2077 which is guaranteed by the iteration order. */
2078 gcc_assert (first != NULL);
2080 phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first);
2082 /* If we have multiple successors we need to intersect the ANTIC_OUT
2083 sets. For values that's a simple intersection but for
2084 expressions it is a union. Given we want to have a single
2085 expression per value in our sets we have to canonicalize.
2086 Avoid randomness and running into cycles like for PR82129 and
2087 canonicalize the expression we choose to the one with the
2088 lowest id. This requires we actually compute the union first. */
2089 FOR_EACH_VEC_ELT (worklist, i, e)
2091 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2093 bitmap_set_t tmp = bitmap_set_new ();
2094 phi_translate_set (tmp, ANTIC_IN (e->dest), e);
2095 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2096 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2097 bitmap_set_free (tmp);
2099 else
2101 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
2102 bitmap_ior_into (&ANTIC_OUT->expressions,
2103 &ANTIC_IN (e->dest)->expressions);
2106 if (! worklist.is_empty ())
2108 /* Prune expressions not in the value set. */
2109 bitmap_iterator bi;
2110 unsigned int i;
2111 unsigned int to_clear = -1U;
2112 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2114 if (to_clear != -1U)
2116 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2117 to_clear = -1U;
2119 pre_expr expr = expression_for_id (i);
2120 unsigned int value_id = get_expr_value_id (expr);
2121 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
2122 to_clear = i;
2124 if (to_clear != -1U)
2125 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2129 /* Prune expressions that are clobbered in block and thus become
2130 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2131 prune_clobbered_mems (ANTIC_OUT, block);
2133 /* Generate ANTIC_OUT - TMP_GEN. */
2134 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2136 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2137 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2138 TMP_GEN (block));
2140 /* Then union in the ANTIC_OUT - TMP_GEN values,
2141 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2142 bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
2143 bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);
2145 /* clean (ANTIC_IN (block)) is deferred until after the iteration has
2146 converged because it can cause non-convergence; see for example PR81181. */
2148 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2150 changed = true;
2151 /* After the initial value set computation the value set may
2152 only shrink during the iteration. */
2153 if (was_visited && BB_VISITED_WITH_VISITED_SUCCS (block) && flag_checking)
2155 bitmap_iterator bi;
2156 unsigned int i;
2157 EXECUTE_IF_AND_COMPL_IN_BITMAP (&ANTIC_IN (block)->values,
2158 &old->values, 0, i, bi)
2159 gcc_unreachable ();
2163 maybe_dump_sets:
2164 if (dump_file && (dump_flags & TDF_DETAILS))
2166 if (ANTIC_OUT)
2167 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2169 if (changed)
2170 fprintf (dump_file, "[changed] ");
2171 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2172 block->index);
2174 if (S)
2175 print_bitmap_set (dump_file, S, "S", block->index);
2177 if (old)
2178 bitmap_set_free (old);
2179 if (S)
2180 bitmap_set_free (S);
2181 if (ANTIC_OUT)
2182 bitmap_set_free (ANTIC_OUT);
2183 return changed;
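/* A minimal sketch (illustration only) of how PA_OUT below differs
   from ANTIC_OUT, in the same invented bitset style as the ANTIC
   sketch above: over multiple successors partial antic takes a
   union instead of an intersection, and DFS back edges are skipped
   entirely.  Assumes <stdint.h> and <stdbool.h>.  */
#if 0
static uint64_t
pa_out_sketch (int b, const int succ[][2], const bool back_edge[][2],
               const uint64_t pa_in[], const uint64_t antic_in[])
{
  uint64_t pa_out = 0;
  for (int k = 0; k < 2 && succ[b][k] != -1; k++)
    if (!back_edge[b][k])
      /* Union, not intersection, over the successors.  */
      pa_out |= pa_in[succ[b][k]] | antic_in[succ[b][k]];
  /* PA_IN = clean (PA_OUT - TMP_GEN - ANTIC_IN) follows.  */
  return pa_out;
}
#endif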
2186 /* Compute PARTIAL_ANTIC for BLOCK.
2188 If succs(BLOCK) > 1 then
2189 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2190 in ANTIC_OUT for all succ(BLOCK)
2191 else if succs(BLOCK) == 1 then
2192 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2194 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
2197 static void
2198 compute_partial_antic_aux (basic_block block,
2199 bool block_has_abnormal_pred_edge)
2201 bitmap_set_t old_PA_IN;
2202 bitmap_set_t PA_OUT;
2203 edge e;
2204 edge_iterator ei;
2205 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2207 old_PA_IN = PA_OUT = NULL;
2209 /* If any edges from predecessors are abnormal, antic_in is empty,
2210 so do nothing. */
2211 if (block_has_abnormal_pred_edge)
2212 goto maybe_dump_sets;
2214 /* If there are too many partially anticipatable values in the
2215 block, phi_translate_set can take an exponential time: stop
2216 before the translation starts. */
2217 if (max_pa
2218 && single_succ_p (block)
2219 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2220 goto maybe_dump_sets;
2222 old_PA_IN = PA_IN (block);
2223 PA_OUT = bitmap_set_new ();
2225 /* If the block has no successors, PA_OUT is empty. */
2226 if (EDGE_COUNT (block->succs) == 0)
2228 /* If we have one successor, we could have some phi nodes to
2229 translate through. Note that we can't phi translate across DFS
2230 back edges in partial antic, because it uses a union operation on
2231 the successors. For recurrences like IVs, we will end up
2232 generating a new value in the set on each go around (i + 3 (VH.1),
2233 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2234 else if (single_succ_p (block))
2236 e = single_succ_edge (block);
2237 if (!(e->flags & EDGE_DFS_BACK))
2238 phi_translate_set (PA_OUT, PA_IN (e->dest), e);
2240 /* If we have multiple successors, we take the union of all of
2241 them. */
2242 else
2244 size_t i;
2246 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2247 FOR_EACH_EDGE (e, ei, block->succs)
2249 if (e->flags & EDGE_DFS_BACK)
2250 continue;
2251 worklist.quick_push (e);
2253 if (worklist.length () > 0)
2255 FOR_EACH_VEC_ELT (worklist, i, e)
2257 unsigned int i;
2258 bitmap_iterator bi;
2260 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
2261 bitmap_value_insert_into_set (PA_OUT,
2262 expression_for_id (i));
2263 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2265 bitmap_set_t pa_in = bitmap_set_new ();
2266 phi_translate_set (pa_in, PA_IN (e->dest), e);
2267 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2268 bitmap_value_insert_into_set (PA_OUT,
2269 expression_for_id (i));
2270 bitmap_set_free (pa_in);
2272 else
2273 FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
2274 bitmap_value_insert_into_set (PA_OUT,
2275 expression_for_id (i));
2280 /* Prune expressions that are clobbered in block and thus become
2281 invalid if translated from PA_OUT to PA_IN. */
2282 prune_clobbered_mems (PA_OUT, block);
2284 /* PA_IN starts with PA_OUT - TMP_GEN.
2285 Then we subtract things from ANTIC_IN. */
2286 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2288 /* For partial antic, we want to put back in the phi results, since
2289 we will properly avoid making them partially antic over backedges. */
2290 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2291 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2293 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2294 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2296 clean (PA_IN (block), ANTIC_IN (block));
2298 maybe_dump_sets:
2299 if (dump_file && (dump_flags & TDF_DETAILS))
2301 if (PA_OUT)
2302 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2304 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2306 if (old_PA_IN)
2307 bitmap_set_free (old_PA_IN);
2308 if (PA_OUT)
2309 bitmap_set_free (PA_OUT);
2312 /* Compute ANTIC and partial ANTIC sets. */
2314 static void
2315 compute_antic (void)
2317 bool changed = true;
2318 int num_iterations = 0;
2319 basic_block block;
2320 int i;
2321 edge_iterator ei;
2322 edge e;
2324 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2325 We pre-build the map of blocks with incoming abnormal edges here. */
2326 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2327 bitmap_clear (has_abnormal_preds);
2329 FOR_ALL_BB_FN (block, cfun)
2331 BB_VISITED (block) = 0;
2332 BB_VISITED_WITH_VISITED_SUCCS (block) = 0;
2334 FOR_EACH_EDGE (e, ei, block->preds)
2335 if (e->flags & EDGE_ABNORMAL)
2337 bitmap_set_bit (has_abnormal_preds, block->index);
2338 break;
2341 /* While we are here, give empty ANTIC_IN sets to each block. */
2342 ANTIC_IN (block) = bitmap_set_new ();
2343 if (do_partial_partial)
2344 PA_IN (block) = bitmap_set_new ();
2347 /* At the exit block we anticipate nothing. */
2348 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2349 BB_VISITED_WITH_VISITED_SUCCS (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2351 /* For ANTIC computation we need a postorder that also guarantees that
2352 a block with a single successor is visited after its successor.
2353 RPO on the inverted CFG has this property. */
2354 auto_vec<int, 20> postorder;
2355 inverted_post_order_compute (&postorder);
2357 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2358 bitmap_clear (worklist);
2359 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2360 bitmap_set_bit (worklist, e->src->index);
2361 while (changed)
2363 if (dump_file && (dump_flags & TDF_DETAILS))
2364 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2365 /* ??? We need to clear our PHI translation cache here as the
2366 ANTIC sets shrink and we restrict valid translations to
2367 those having operands with leaders in ANTIC. Same below
2368 for PA ANTIC computation. */
2369 num_iterations++;
2370 changed = false;
2371 for (i = postorder.length () - 1; i >= 0; i--)
2373 if (bitmap_bit_p (worklist, postorder[i]))
2375 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2376 bitmap_clear_bit (worklist, block->index);
2377 if (compute_antic_aux (block,
2378 bitmap_bit_p (has_abnormal_preds,
2379 block->index)))
2381 FOR_EACH_EDGE (e, ei, block->preds)
2382 bitmap_set_bit (worklist, e->src->index);
2383 changed = true;
2387 /* Theoretically possible, but *highly* unlikely. */
2388 gcc_checking_assert (num_iterations < 500);
2391 /* We have to clean after the dataflow problem has converged, as
2392 cleaning can cause non-convergence because it is based on
2393 expressions rather than values. */
2394 FOR_EACH_BB_FN (block, cfun)
2395 clean (ANTIC_IN (block));
2397 statistics_histogram_event (cfun, "compute_antic iterations",
2398 num_iterations);
2400 if (do_partial_partial)
2402 /* For partial antic we ignore backedges and thus we do not need
2403 to perform any iteration when we process blocks in postorder. */
2404 int postorder_num
2405 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2406 for (i = postorder_num - 1 ; i >= 0; i--)
2408 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2409 compute_partial_antic_aux (block,
2410 bitmap_bit_p (has_abnormal_preds,
2411 block->index));
2415 sbitmap_free (has_abnormal_preds);
2419 /* Inserted expressions are placed onto this worklist, which is used
2420 for performing quick dead code elimination of insertions we made
2421 that didn't turn out to be necessary. */
2422 static bitmap inserted_exprs;
2424 /* The actual worker for create_component_ref_by_pieces. */
2426 static tree
2427 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2428 unsigned int *operand, gimple_seq *stmts)
2430 vn_reference_op_t currop = &ref->operands[*operand];
2431 tree genop;
2432 ++*operand;
2433 switch (currop->opcode)
2435 case CALL_EXPR:
2436 gcc_unreachable ();
2438 case MEM_REF:
2440 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2441 stmts);
2442 if (!baseop)
2443 return NULL_TREE;
2444 tree offset = currop->op0;
2445 if (TREE_CODE (baseop) == ADDR_EXPR
2446 && handled_component_p (TREE_OPERAND (baseop, 0)))
2448 poly_int64 off;
2449 tree base;
2450 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2451 &off);
2452 gcc_assert (base);
2453 offset = int_const_binop (PLUS_EXPR, offset,
2454 build_int_cst (TREE_TYPE (offset),
2455 off));
2456 baseop = build_fold_addr_expr (base);
2458 genop = build2 (MEM_REF, currop->type, baseop, offset);
2459 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2460 MR_DEPENDENCE_BASE (genop) = currop->base;
2461 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2462 return genop;
2465 case TARGET_MEM_REF:
2467 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2468 vn_reference_op_t nextop = &ref->operands[++*operand];
2469 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2470 stmts);
2471 if (!baseop)
2472 return NULL_TREE;
2473 if (currop->op0)
2475 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2476 if (!genop0)
2477 return NULL_TREE;
2479 if (nextop->op0)
2481 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2482 if (!genop1)
2483 return NULL_TREE;
2485 genop = build5 (TARGET_MEM_REF, currop->type,
2486 baseop, currop->op2, genop0, currop->op1, genop1);
2488 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2489 MR_DEPENDENCE_BASE (genop) = currop->base;
2490 return genop;
2493 case ADDR_EXPR:
2494 if (currop->op0)
2496 gcc_assert (is_gimple_min_invariant (currop->op0));
2497 return currop->op0;
2499 /* Fallthrough. */
2500 case REALPART_EXPR:
2501 case IMAGPART_EXPR:
2502 case VIEW_CONVERT_EXPR:
2504 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2505 stmts);
2506 if (!genop0)
2507 return NULL_TREE;
2508 return fold_build1 (currop->opcode, currop->type, genop0);
2511 case WITH_SIZE_EXPR:
2513 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2514 stmts);
2515 if (!genop0)
2516 return NULL_TREE;
2517 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2518 if (!genop1)
2519 return NULL_TREE;
2520 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2523 case BIT_FIELD_REF:
2525 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2526 stmts);
2527 if (!genop0)
2528 return NULL_TREE;
2529 tree op1 = currop->op0;
2530 tree op2 = currop->op1;
2531 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2532 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2533 return fold (t);
2536 /* For array ref vn_reference_op's, operand 1 of the array ref
2537 is op0 of the reference op and operand 3 of the array ref is
2538 op1. */
2539 case ARRAY_RANGE_REF:
2540 case ARRAY_REF:
2542 tree genop0;
2543 tree genop1 = currop->op0;
2544 tree genop2 = currop->op1;
2545 tree genop3 = currop->op2;
2546 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2547 stmts);
2548 if (!genop0)
2549 return NULL_TREE;
2550 genop1 = find_or_generate_expression (block, genop1, stmts);
2551 if (!genop1)
2552 return NULL_TREE;
2553 if (genop2)
2555 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2556 /* Drop zero minimum index if redundant. */
2557 if (integer_zerop (genop2)
2558 && (!domain_type
2559 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2560 genop2 = NULL_TREE;
2561 else
2563 genop2 = find_or_generate_expression (block, genop2, stmts);
2564 if (!genop2)
2565 return NULL_TREE;
2568 if (genop3)
2570 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2571 /* We can't always put a size in units of the element alignment
2572 here as the element alignment may be not visible. See
2573 PR43783. Simply drop the element size for constant
2574 sizes. */
2575 if (TREE_CODE (genop3) == INTEGER_CST
2576 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2577 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2578 (wi::to_offset (genop3)
2579 * vn_ref_op_align_unit (currop))))
2580 genop3 = NULL_TREE;
2581 else
2583 genop3 = find_or_generate_expression (block, genop3, stmts);
2584 if (!genop3)
2585 return NULL_TREE;
2588 return build4 (currop->opcode, currop->type, genop0, genop1,
2589 genop2, genop3);
2591 case COMPONENT_REF:
2593 tree op0;
2594 tree op1;
2595 tree genop2 = currop->op1;
2596 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2597 if (!op0)
2598 return NULL_TREE;
2599 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2600 op1 = currop->op0;
2601 if (genop2)
2603 genop2 = find_or_generate_expression (block, genop2, stmts);
2604 if (!genop2)
2605 return NULL_TREE;
2607 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2610 case SSA_NAME:
2612 genop = find_or_generate_expression (block, currop->op0, stmts);
2613 return genop;
2615 case STRING_CST:
2616 case INTEGER_CST:
2617 case COMPLEX_CST:
2618 case VECTOR_CST:
2619 case REAL_CST:
2620 case CONSTRUCTOR:
2621 case VAR_DECL:
2622 case PARM_DECL:
2623 case CONST_DECL:
2624 case RESULT_DECL:
2625 case FUNCTION_DECL:
2626 return currop->op0;
2628 default:
2629 gcc_unreachable ();
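/* An illustrative example of the operand walk implemented above (a
   sketch; the operand layout is paraphrased, not quoted from the
   sources): a load like a.b[i_1] is represented by vn_reference
   operands ordered outermost-first, roughly { ARRAY_REF <i_1>,
   COMPONENT_REF <b>, VAR_DECL <a> }.  The routine consumes the
   ARRAY_REF operand first and recurses via *OPERAND to rebuild the
   inner COMPONENT_REF and finally the base, using
   find_or_generate_expression to obtain leaders such as one for
   i_1 along the way.  */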
2633 /* For COMPONENT_REFs and ARRAY_REFs, we cannot have any intermediates for the
2634 COMPONENT_REF, MEM_REF or ARRAY_REF portion, because we'd end up
2635 trying to rename aggregates into SSA form directly, which is not allowed.
2637 Thus, this routine doesn't create temporaries; it just builds a
2638 single access expression for the array, calling
2639 find_or_generate_expression to build the innermost pieces.
2641 This function is a subroutine of create_expression_by_pieces, and
2642 should not be called on its own unless you really know what you
2643 are doing. */
2645 static tree
2646 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2647 gimple_seq *stmts)
2649 unsigned int op = 0;
2650 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2653 /* Find a simple leader for an expression, or generate one using
2654 create_expression_by_pieces from a NARY expression for the value.
2655 BLOCK is the basic_block we are looking for leaders in.
2656 OP is the tree expression to find a leader for or generate.
2657 Returns the leader or NULL_TREE on failure. */
2659 static tree
2660 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2662 pre_expr expr = get_or_alloc_expr_for (op);
2663 unsigned int lookfor = get_expr_value_id (expr);
2664 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2665 if (leader)
2667 if (leader->kind == NAME)
2668 return PRE_EXPR_NAME (leader);
2669 else if (leader->kind == CONSTANT)
2670 return PRE_EXPR_CONSTANT (leader);
2672 /* Defer. */
2673 return NULL_TREE;
2676 /* It must be a complex expression, so generate it recursively. Note
2677 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2678 where the insert algorithm fails to insert a required expression. */
2679 bitmap exprset = value_expressions[lookfor];
2680 bitmap_iterator bi;
2681 unsigned int i;
2682 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2684 pre_expr temp = expression_for_id (i);
2685 /* We cannot insert random REFERENCE expressions at arbitrary
2686 places. We can insert NARYs, which eventually re-materialize
2687 their operand values. */
2688 if (temp->kind == NARY)
2689 return create_expression_by_pieces (block, temp, stmts,
2690 get_expr_type (expr));
2693 /* Defer. */
2694 return NULL_TREE;
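/* A worked illustration for create_expression_by_pieces below (a
   sketch; the SSA version numbers are invented): asked to generate
   the value of a_1 + (b_2 * c_3) when the multiplication has no
   leader in AVAIL_OUT, find_or_generate_expression recurses and the
   routine appends roughly

     pretmp_10 = b_2 * c_3;
     pretmp_11 = a_1 + pretmp_10;

   to STMTS, value-numbering each temporary as it goes.  */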
2697 /* Create an expression in pieces, so that we can handle very complex
2698 expressions that may be ANTIC, but not necessarily valid GIMPLE.
2699 BLOCK is the basic block the expression will be inserted into,
2700 EXPR is the expression to insert (in value form)
2701 STMTS is a statement list to append the necessary insertions into.
2703 This function will die if we hit some value that shouldn't be
2704 ANTIC but is (i.e. there is no leader for it, or for its components).
2705 The function returns NULL_TREE in case a different antic expression
2706 has to be inserted first.
2707 This function may also generate expressions that are themselves
2708 partially or fully redundant. Those that are will be either made
2709 fully redundant during the next iteration of insert (for partially
2710 redundant ones), or eliminated by eliminate (for fully redundant
2711 ones). */
2713 static tree
2714 create_expression_by_pieces (basic_block block, pre_expr expr,
2715 gimple_seq *stmts, tree type)
2717 tree name;
2718 tree folded;
2719 gimple_seq forced_stmts = NULL;
2720 unsigned int value_id;
2721 gimple_stmt_iterator gsi;
2722 tree exprtype = type ? type : get_expr_type (expr);
2723 pre_expr nameexpr;
2724 gassign *newstmt;
2726 switch (expr->kind)
2728 /* We may hit the NAME/CONSTANT case if we have to convert types
2729 that value numbering saw through. */
2730 case NAME:
2731 folded = PRE_EXPR_NAME (expr);
2732 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
2733 return NULL_TREE;
2734 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2735 return folded;
2736 break;
2737 case CONSTANT:
2739 folded = PRE_EXPR_CONSTANT (expr);
2740 tree tem = fold_convert (exprtype, folded);
2741 if (is_gimple_min_invariant (tem))
2742 return tem;
2743 break;
2745 case REFERENCE:
2746 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2748 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2749 unsigned int operand = 1;
2750 vn_reference_op_t currop = &ref->operands[0];
2751 tree sc = NULL_TREE;
2752 tree fn = find_or_generate_expression (block, currop->op0, stmts);
2753 if (!fn)
2754 return NULL_TREE;
2755 if (currop->op1)
2757 sc = find_or_generate_expression (block, currop->op1, stmts);
2758 if (!sc)
2759 return NULL_TREE;
2761 auto_vec<tree> args (ref->operands.length () - 1);
2762 while (operand < ref->operands.length ())
2764 tree arg = create_component_ref_by_pieces_1 (block, ref,
2765 &operand, stmts);
2766 if (!arg)
2767 return NULL_TREE;
2768 args.quick_push (arg);
2770 gcall *call = gimple_build_call_vec (fn, args);
2771 gimple_call_set_with_bounds (call, currop->with_bounds);
2772 if (sc)
2773 gimple_call_set_chain (call, sc);
2774 tree forcedname = make_ssa_name (currop->type);
2775 gimple_call_set_lhs (call, forcedname);
2776 /* There's no CCP pass after PRE which would re-compute alignment
2777 information, so make sure we re-materialize this here. */
2778 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
2779 && args.length () - 2 <= 1
2780 && tree_fits_uhwi_p (args[1])
2781 && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
2783 unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
2784 unsigned HOST_WIDE_INT hmisalign
2785 = args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
2786 if ((halign & (halign - 1)) == 0
2787 && (hmisalign & ~(halign - 1)) == 0)
2788 set_ptr_info_alignment (get_ptr_info (forcedname),
2789 halign, hmisalign);
2791 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2792 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2793 folded = forcedname;
2795 else
2797 folded = create_component_ref_by_pieces (block,
2798 PRE_EXPR_REFERENCE (expr),
2799 stmts);
2800 if (!folded)
2801 return NULL_TREE;
2802 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2803 newstmt = gimple_build_assign (name, folded);
2804 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2805 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2806 folded = name;
2808 break;
2809 case NARY:
2811 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2812 tree *genop = XALLOCAVEC (tree, nary->length);
2813 unsigned i;
2814 for (i = 0; i < nary->length; ++i)
2816 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2817 if (!genop[i])
2818 return NULL_TREE;
2819 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2820 may have conversions stripped. */
2821 if (nary->opcode == POINTER_PLUS_EXPR)
2823 if (i == 0)
2824 genop[i] = gimple_convert (&forced_stmts,
2825 nary->type, genop[i]);
2826 else if (i == 1)
2827 genop[i] = gimple_convert (&forced_stmts,
2828 sizetype, genop[i]);
2830 else
2831 genop[i] = gimple_convert (&forced_stmts,
2832 TREE_TYPE (nary->op[i]), genop[i]);
2834 if (nary->opcode == CONSTRUCTOR)
2836 vec<constructor_elt, va_gc> *elts = NULL;
2837 for (i = 0; i < nary->length; ++i)
2838 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2839 folded = build_constructor (nary->type, elts);
2840 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2841 newstmt = gimple_build_assign (name, folded);
2842 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2843 folded = name;
2845 else
2847 switch (nary->length)
2849 case 1:
2850 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2851 genop[0]);
2852 break;
2853 case 2:
2854 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2855 genop[0], genop[1]);
2856 break;
2857 case 3:
2858 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2859 genop[0], genop[1], genop[2]);
2860 break;
2861 default:
2862 gcc_unreachable ();
2866 break;
2867 default:
2868 gcc_unreachable ();
2871 folded = gimple_convert (&forced_stmts, exprtype, folded);
2873 /* If there is nothing to insert, return the simplified result. */
2874 if (gimple_seq_empty_p (forced_stmts))
2875 return folded;
2876 /* If we simplified to a constant, return it and discard any
2877 stmts we may have built. */
2878 if (is_gimple_min_invariant (folded))
2880 gimple_seq_discard (forced_stmts);
2881 return folded;
2883 /* Likewise if we simplified to something not queued for insertion. */
2884 bool found = false;
2885 gsi = gsi_last (forced_stmts);
2886 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2888 gimple *stmt = gsi_stmt (gsi);
2889 tree forcedname = gimple_get_lhs (stmt);
2890 if (forcedname == folded)
2892 found = true;
2893 break;
2896 if (! found)
2898 gimple_seq_discard (forced_stmts);
2899 return folded;
2901 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2903 /* If we have any intermediate expressions, add them to the
2904 value sets and chain them into the instruction stream. */
2905 if (forced_stmts)
2907 gsi = gsi_start (forced_stmts);
2908 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2910 gimple *stmt = gsi_stmt (gsi);
2911 tree forcedname = gimple_get_lhs (stmt);
2912 pre_expr nameexpr;
2914 if (forcedname != folded)
2916 VN_INFO_GET (forcedname)->valnum = forcedname;
2917 VN_INFO (forcedname)->value_id = get_next_value_id ();
2918 nameexpr = get_or_alloc_expr_for_name (forcedname);
2919 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2920 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2921 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2924 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2926 gimple_seq_add_seq (stmts, forced_stmts);
2929 name = folded;
2931 /* Fold the last statement. */
2932 gsi = gsi_last (*stmts);
2933 if (fold_stmt_inplace (&gsi))
2934 update_stmt (gsi_stmt (gsi));
2936 /* Add a value number to the temporary.
2937 The value may already exist in either NEW_SETS or AVAIL_OUT, because
2938 we are creating the expression by pieces, and this particular piece of
2939 the expression may already be represented. There is no harm in
2940 replacing here. */
2941 value_id = get_expr_value_id (expr);
2942 VN_INFO_GET (name)->value_id = value_id;
2943 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2944 if (VN_INFO (name)->valnum == NULL_TREE)
2945 VN_INFO (name)->valnum = name;
2946 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2947 nameexpr = get_or_alloc_expr_for_name (name);
2948 add_to_value (value_id, nameexpr);
2949 if (NEW_SETS (block))
2950 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2951 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2953 pre_stats.insertions++;
2954 if (dump_file && (dump_flags & TDF_DETAILS))
2956 fprintf (dump_file, "Inserted ");
2957 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2958 fprintf (dump_file, " in predecessor %d (%04d)\n",
2959 block->index, value_id);
2962 return name;
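/* A worked example for the PHI insertion below (a sketch; block and
   SSA numbers are invented):

     <bb 3>: x_1 = a_2 + b_3;   (then branch)
     <bb 4>: ...;               (else branch, no computation)
     <bb 5>: y_4 = a_2 + b_3;   (join; partially redundant)

   Insertion emits pretmp_5 = a_2 + b_3 into bb 4 and creates
   prephitmp_6 = PHI <x_1(3), pretmp_5(4)> in bb 5, making the
   computation in bb 5 fully redundant so that elimination can
   replace it with prephitmp_6.  */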
2966 /* Insert the to-be-made-available values of expression EXPRNUM for each
2967 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2968 merge the result with a phi node, given the same value number as
2969 the expression. Return true if we have inserted new stuff. */
2971 static bool
2972 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2973 vec<pre_expr> avail)
2975 pre_expr expr = expression_for_id (exprnum);
2976 pre_expr newphi;
2977 unsigned int val = get_expr_value_id (expr);
2978 edge pred;
2979 bool insertions = false;
2980 bool nophi = false;
2981 basic_block bprime;
2982 pre_expr eprime;
2983 edge_iterator ei;
2984 tree type = get_expr_type (expr);
2985 tree temp;
2986 gphi *phi;
2988 /* Make sure we aren't creating an induction variable. */
2989 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2991 bool firstinsideloop = false;
2992 bool secondinsideloop = false;
2993 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2994 EDGE_PRED (block, 0)->src);
2995 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2996 EDGE_PRED (block, 1)->src);
2997 /* Induction variables only have one edge inside the loop. */
2998 if ((firstinsideloop ^ secondinsideloop)
2999 && expr->kind != REFERENCE)
3001 if (dump_file && (dump_flags & TDF_DETAILS))
3002 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3003 nophi = true;
3007 /* Make the necessary insertions. */
3008 FOR_EACH_EDGE (pred, ei, block->preds)
3010 gimple_seq stmts = NULL;
3011 tree builtexpr;
3012 bprime = pred->src;
3013 eprime = avail[pred->dest_idx];
3014 builtexpr = create_expression_by_pieces (bprime, eprime,
3015 &stmts, type);
3016 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3017 if (!gimple_seq_empty_p (stmts))
3019 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
3020 gcc_assert (! new_bb);
3021 insertions = true;
3023 if (!builtexpr)
3025 /* We cannot insert a PHI node if we failed to insert
3026 on one edge. */
3027 nophi = true;
3028 continue;
3030 if (is_gimple_min_invariant (builtexpr))
3031 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3032 else
3033 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3035 /* If we didn't want a phi node, and we made insertions, we still have
3036 inserted new stuff, and thus return true. If we didn't want a phi node,
3037 and didn't make insertions, we haven't added anything new, so return
3038 false. */
3039 if (nophi && insertions)
3040 return true;
3041 else if (nophi && !insertions)
3042 return false;
3044 /* Now build a phi for the new variable. */
3045 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3046 phi = create_phi_node (temp, block);
3048 VN_INFO_GET (temp)->value_id = val;
3049 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3050 if (VN_INFO (temp)->valnum == NULL_TREE)
3051 VN_INFO (temp)->valnum = temp;
3052 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3053 FOR_EACH_EDGE (pred, ei, block->preds)
3055 pre_expr ae = avail[pred->dest_idx];
3056 gcc_assert (get_expr_type (ae) == type
3057 || useless_type_conversion_p (type, get_expr_type (ae)));
3058 if (ae->kind == CONSTANT)
3059 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3060 pred, UNKNOWN_LOCATION);
3061 else
3062 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3065 newphi = get_or_alloc_expr_for_name (temp);
3066 add_to_value (val, newphi);
3068 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3069 this insertion, since we test for the existence of this value in PHI_GEN
3070 before proceeding with the partial redundancy checks in insert_aux.
3072 The value may exist in AVAIL_OUT, in particular, it could be represented
3073 by the expression we are trying to eliminate, in which case we want the
3074 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3075 inserted there.
3077 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3078 this block, because if it did, it would have existed in our dominator's
3079 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3082 bitmap_insert_into_set (PHI_GEN (block), newphi);
3083 bitmap_value_replace_in_set (AVAIL_OUT (block),
3084 newphi);
3085 bitmap_insert_into_set (NEW_SETS (block),
3086 newphi);
3088 /* If we insert a PHI node for a conversion of another PHI node
3089 in the same basic-block try to preserve range information.
3090 This is important so that followup loop passes receive optimal
3091 number of iteration analysis results. See PR61743. */
3092 if (expr->kind == NARY
3093 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3094 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3095 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3096 && INTEGRAL_TYPE_P (type)
3097 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3098 && (TYPE_PRECISION (type)
3099 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3100 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3102 wide_int min, max;
3103 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3104 && !wi::neg_p (min, SIGNED)
3105 && !wi::neg_p (max, SIGNED))
3106 /* Just handle extension and sign-changes of all-positive ranges. */
3107 set_range_info (temp,
3108 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3109 wide_int_storage::from (min, TYPE_PRECISION (type),
3110 TYPE_SIGN (type)),
3111 wide_int_storage::from (max, TYPE_PRECISION (type),
3112 TYPE_SIGN (type)));
3115 if (dump_file && (dump_flags & TDF_DETAILS))
3117 fprintf (dump_file, "Created phi ");
3118 print_gimple_stmt (dump_file, phi, 0);
3119 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3121 pre_stats.phis++;
3122 return true;
3127 /* Perform insertion of partially redundant or hoistable values.
3128 For BLOCK, do the following:
3129 1. Propagate the NEW_SETS of the dominator into the current block.
3130 If the block has multiple predecessors,
3131 2a. Iterate over the ANTIC expressions for the block to see if
3132 any of them are partially redundant.
3133 2b. If so, insert them into the necessary predecessors to make
3134 the expression fully redundant.
3135 2c. Insert a new PHI merging the values of the predecessors.
3136 2d. Insert the new PHI, and the new expressions, into the
3137 NEW_SETS set.
3138 If the block has multiple successors,
3139 3a. Iterate over the ANTIC values for the block to see if
3140 any of them are good candidates for hoisting.
3141 3b. If so, insert expressions computing the values in BLOCK,
3142 and add the new expressions into the NEW_SETS set.
3143 4. Recursively call ourselves on the dominator children of BLOCK.
3145 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3146 do_pre_regular_insertion and do_partial_insertion. 3a and 3b are
3147 done in do_hoist_insertion.
3150 static bool
3151 do_pre_regular_insertion (basic_block block, basic_block dom)
3153 bool new_stuff = false;
3154 vec<pre_expr> exprs;
3155 pre_expr expr;
3156 auto_vec<pre_expr> avail;
3157 int i;
3159 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3160 avail.safe_grow (EDGE_COUNT (block->preds));
3162 FOR_EACH_VEC_ELT (exprs, i, expr)
3164 if (expr->kind == NARY
3165 || expr->kind == REFERENCE)
3167 unsigned int val;
3168 bool by_some = false;
3169 bool cant_insert = false;
3170 bool all_same = true;
3171 pre_expr first_s = NULL;
3172 edge pred;
3173 basic_block bprime;
3174 pre_expr eprime = NULL;
3175 edge_iterator ei;
3176 pre_expr edoubleprime = NULL;
3177 bool do_insertion = false;
3179 val = get_expr_value_id (expr);
3180 if (bitmap_set_contains_value (PHI_GEN (block), val))
3181 continue;
3182 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3184 if (dump_file && (dump_flags & TDF_DETAILS))
3186 fprintf (dump_file, "Found fully redundant value: ");
3187 print_pre_expr (dump_file, expr);
3188 fprintf (dump_file, "\n");
3190 continue;
3193 FOR_EACH_EDGE (pred, ei, block->preds)
3195 unsigned int vprime;
3197 /* We should never run insertion for the exit block
3198 and so never come across fake pred edges. */
3199 gcc_assert (!(pred->flags & EDGE_FAKE));
3200 bprime = pred->src;
3201 /* We are looking at ANTIC_OUT of bprime. */
3202 eprime = phi_translate (expr, ANTIC_IN (block), NULL, pred);
3204 /* eprime will generally only be NULL if the
3205 value of the expression, translated
3206 through the PHI for this predecessor, is
3207 undefined. If that is the case, we can't
3208 make the expression fully redundant,
3209 because its value is undefined along a
3210 predecessor path. We can thus break out
3211 early because it doesn't matter what the
3212 rest of the results are. */
3213 if (eprime == NULL)
3215 avail[pred->dest_idx] = NULL;
3216 cant_insert = true;
3217 break;
3220 vprime = get_expr_value_id (eprime);
3221 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3222 vprime);
3223 if (edoubleprime == NULL)
3225 avail[pred->dest_idx] = eprime;
3226 all_same = false;
3228 else
3230 avail[pred->dest_idx] = edoubleprime;
3231 by_some = true;
3232 /* We want to perform insertions to remove a redundancy on
3233 a path in the CFG we want to optimize for speed. */
3234 if (optimize_edge_for_speed_p (pred))
3235 do_insertion = true;
3236 if (first_s == NULL)
3237 first_s = edoubleprime;
3238 else if (!pre_expr_d::equal (first_s, edoubleprime))
3239 all_same = false;
3242 /* If we can insert it, it's not the same value
3243 already existing along every predecessor, and
3244 it's defined by some predecessor, it is
3245 partially redundant. */
3246 if (!cant_insert && !all_same && by_some)
3248 if (!do_insertion)
3250 if (dump_file && (dump_flags & TDF_DETAILS))
3252 fprintf (dump_file, "Skipping partial redundancy for "
3253 "expression ");
3254 print_pre_expr (dump_file, expr);
3255 fprintf (dump_file, " (%04d), no redundancy on to be "
3256 "optimized for speed edge\n", val);
3259 else if (dbg_cnt (treepre_insert))
3261 if (dump_file && (dump_flags & TDF_DETAILS))
3263 fprintf (dump_file, "Found partial redundancy for "
3264 "expression ");
3265 print_pre_expr (dump_file, expr);
3266 fprintf (dump_file, " (%04d)\n",
3267 get_expr_value_id (expr));
3269 if (insert_into_preds_of_block (block,
3270 get_expression_id (expr),
3271 avail))
3272 new_stuff = true;
3275 /* If all edges produce the same value and that value is
3276 an invariant, then the PHI has the same value on all
3277 edges. Note this. */
3278 else if (!cant_insert && all_same)
3280 gcc_assert (edoubleprime->kind == CONSTANT
3281 || edoubleprime->kind == NAME);
3283 tree temp = make_temp_ssa_name (get_expr_type (expr),
3284 NULL, "pretmp");
3285 gassign *assign
3286 = gimple_build_assign (temp,
3287 edoubleprime->kind == CONSTANT ?
3288 PRE_EXPR_CONSTANT (edoubleprime) :
3289 PRE_EXPR_NAME (edoubleprime));
3290 gimple_stmt_iterator gsi = gsi_after_labels (block);
3291 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3293 VN_INFO_GET (temp)->value_id = val;
3294 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3295 if (VN_INFO (temp)->valnum == NULL_TREE)
3296 VN_INFO (temp)->valnum = temp;
3297 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3298 pre_expr newe = get_or_alloc_expr_for_name (temp);
3299 add_to_value (val, newe);
3300 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3301 bitmap_insert_into_set (NEW_SETS (block), newe);
3306 exprs.release ();
3307 return new_stuff;
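/* An illustrative case for the partial-partial insertion below (a
   sketch; names are invented): a loop-invariant a_1 + b_2 computed
   both before a loop and conditionally inside it is fully available
   at the loop header yet only partially anticipatable there;
   inserting a PHI in the header lets the later in-loop computation
   be removed.  */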
3311 /* Perform insertion for partially anticipatable expressions. There
3312 is only one case in which we perform insertion for these: when the
3313 expression is partially anticipatable and fully available. In
3314 that case, we know that computing it earlier will enable us to
3315 remove the later computation. */
3317 static bool
3318 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3320 bool new_stuff = false;
3321 vec<pre_expr> exprs;
3322 pre_expr expr;
3323 auto_vec<pre_expr> avail;
3324 int i;
3326 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3327 avail.safe_grow (EDGE_COUNT (block->preds));
3329 FOR_EACH_VEC_ELT (exprs, i, expr)
3331 if (expr->kind == NARY
3332 || expr->kind == REFERENCE)
3334 unsigned int val;
3335 bool by_all = true;
3336 bool cant_insert = false;
3337 edge pred;
3338 basic_block bprime;
3339 pre_expr eprime = NULL;
3340 edge_iterator ei;
3342 val = get_expr_value_id (expr);
3343 if (bitmap_set_contains_value (PHI_GEN (block), val))
3344 continue;
3345 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3346 continue;
3348 FOR_EACH_EDGE (pred, ei, block->preds)
3350 unsigned int vprime;
3351 pre_expr edoubleprime;
3353 /* We should never run insertion for the exit block
3354 and so never come across fake pred edges. */
3355 gcc_assert (!(pred->flags & EDGE_FAKE));
3356 bprime = pred->src;
3357 eprime = phi_translate (expr, ANTIC_IN (block),
3358 PA_IN (block), pred);
3360 /* eprime will generally only be NULL if the
3361 value of the expression, translated
3362 through the PHI for this predecessor, is
3363 undefined. If that is the case, we can't
3364 make the expression fully redundant,
3365 because its value is undefined along a
3366 predecessor path. We can thus break out
3367 early because it doesn't matter what the
3368 rest of the results are. */
3369 if (eprime == NULL)
3371 avail[pred->dest_idx] = NULL;
3372 cant_insert = true;
3373 break;
3376 vprime = get_expr_value_id (eprime);
3377 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3378 avail[pred->dest_idx] = edoubleprime;
3379 if (edoubleprime == NULL)
3381 by_all = false;
3382 break;
3386 /* If we can insert it and the value is available
3387 along every predecessor, the expression is fully
3388 available there while being only partially
3389 anticipated, so insertion may pay off. */
3390 if (!cant_insert && by_all)
3392 edge succ;
3393 bool do_insertion = false;
3395 /* Insert only if we can remove a later expression on a path
3396 that we want to optimize for speed.
3397 The phi node that we will be inserting in BLOCK is not free,
3398 and inserting it for the sake of a !optimize_for_speed successor
3399 may cause regressions on the speed path. */
3400 FOR_EACH_EDGE (succ, ei, block->succs)
3402 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3403 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3405 if (optimize_edge_for_speed_p (succ))
3406 do_insertion = true;
3410 if (!do_insertion)
3412 if (dump_file && (dump_flags & TDF_DETAILS))
3414 fprintf (dump_file, "Skipping partial partial redundancy "
3415 "for expression ");
3416 print_pre_expr (dump_file, expr);
3417 fprintf (dump_file, " (%04d), not (partially) anticipated "
3418 "on any to be optimized for speed edges\n", val);
3421 else if (dbg_cnt (treepre_insert))
3423 pre_stats.pa_insert++;
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3426 fprintf (dump_file, "Found partial partial redundancy "
3427 "for expression ");
3428 print_pre_expr (dump_file, expr);
3429 fprintf (dump_file, " (%04d)\n",
3430 get_expr_value_id (expr));
3432 if (insert_into_preds_of_block (block,
3433 get_expression_id (expr),
3434 avail))
3435 new_stuff = true;
3441 exprs.release ();
3442 return new_stuff;
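/* A worked example for the hoisting insertion below (a sketch;
   numbers are invented):

     <bb 2>: if (c_1) goto bb 3; else goto bb 4;
     <bb 3>: x_2 = a_3 + b_4;
     <bb 4>: y_5 = a_3 + b_4;

   The value of a_3 + b_4 is in ANTIC_IN (bb 2) but not in
   AVAIL_OUT (bb 2), and it is in AVAIL_OUT of both successors, so
   pretmp_6 = a_3 + b_4 is inserted at the end of bb 2 and the
   computations in bb 3 and bb 4 become fully redundant.  */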
3445 /* Insert expressions in BLOCK to compute hoistable values up.
3446 Return TRUE if something was inserted, otherwise return FALSE.
3447 The caller has to make sure that BLOCK has at least two successors. */
3449 static bool
3450 do_hoist_insertion (basic_block block)
3452 edge e;
3453 edge_iterator ei;
3454 bool new_stuff = false;
3455 unsigned i;
3456 gimple_stmt_iterator last;
3458 /* At least two successors, or else... */
3459 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3461 /* Check that all successors of BLOCK are dominated by block.
3462 We could use dominated_by_p() for this, but actually there is a much
3463 quicker check: any successor that is dominated by BLOCK can't have
3464 more than one predecessor edge. */
3465 FOR_EACH_EDGE (e, ei, block->succs)
3466 if (! single_pred_p (e->dest))
3467 return false;
3469 /* Determine the insertion point. If we would have to insert before
3470 the last stmt and cannot safely do so, bail out. */
3471 last = gsi_last_bb (block);
3472 if (!gsi_end_p (last)
3473 && !is_ctrl_stmt (gsi_stmt (last))
3474 && stmt_ends_bb_p (gsi_stmt (last)))
3475 return false;
3477 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3478 hoistable values. */
3479 bitmap_set hoistable_set;
3481 /* A hoistable value must be in ANTIC_IN (BLOCK)
3482 but not in AVAIL_OUT (BLOCK). */
3483 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3484 bitmap_and_compl (&hoistable_set.values,
3485 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3487 /* Short-cut for a common case: hoistable_set is empty. */
3488 if (bitmap_empty_p (&hoistable_set.values))
3489 return false;
3491 /* Compute which of the hoistable values is in AVAIL_OUT of
3492 at least one of the successors of BLOCK. */
3493 bitmap_head availout_in_some;
3494 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3495 FOR_EACH_EDGE (e, ei, block->succs)
3496 /* Do not consider expressions solely because of their availability
3497 on loop exits. They'd be ANTIC-IN throughout the whole loop
3498 and thus effectively hoisted across loops by the combination of
3499 PRE and hoisting. */
3500 if (! loop_exit_edge_p (block->loop_father, e))
3501 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3502 &AVAIL_OUT (e->dest)->values);
3503 bitmap_clear (&hoistable_set.values);
3505 /* Short-cut for a common case: availout_in_some is empty. */
3506 if (bitmap_empty_p (&availout_in_some))
3507 return false;
3509 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3510 hoistable_set.values = availout_in_some;
3511 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3513 /* Now finally construct the topological-ordered expression set. */
3514 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3516 bitmap_clear (&hoistable_set.values);
3518 /* If there are candidate values for hoisting, insert expressions
3519 strategically to make the hoistable expressions fully redundant. */
3520 pre_expr expr;
3521 FOR_EACH_VEC_ELT (exprs, i, expr)
3523 /* While we try to sort expressions topologically above, the
3524 sorting doesn't work out perfectly. Catch expressions we
3525 have already inserted. */
3526 unsigned int value_id = get_expr_value_id (expr);
3527 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3529 if (dump_file && (dump_flags & TDF_DETAILS))
3531 fprintf (dump_file,
3532 "Already inserted expression for ");
3533 print_pre_expr (dump_file, expr);
3534 fprintf (dump_file, " (%04d)\n", value_id);
3536 continue;
3539 /* OK, we should hoist this value. Perform the transformation. */
3540 pre_stats.hoist_insert++;
3541 if (dump_file && (dump_flags & TDF_DETAILS))
3543 fprintf (dump_file,
3544 "Inserting expression in block %d for code hoisting: ",
3545 block->index);
3546 print_pre_expr (dump_file, expr);
3547 fprintf (dump_file, " (%04d)\n", value_id);
3550 gimple_seq stmts = NULL;
3551 tree res = create_expression_by_pieces (block, expr, &stmts,
3552 get_expr_type (expr));
3554 /* Do not return true if expression creation ultimately
3555 did not insert any statements. */
3556 if (gimple_seq_empty_p (stmts))
3557 res = NULL_TREE;
3558 else
3560 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3561 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3562 else
3563 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3566 /* Make sure to not return true if expression creation ultimately
3567 failed but also make sure to insert any stmts produced as they
3568 are tracked in inserted_exprs. */
3569 if (! res)
3570 continue;
3572 new_stuff = true;
3575 exprs.release ();
3577 return new_stuff;
3580 /* Do a dominator walk on the control flow graph, and insert computations
3581 of values as necessary for PRE and hoisting. */
3583 static bool
3584 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3586 basic_block son;
3587 bool new_stuff = false;
3589 if (block)
3591 basic_block dom;
3592 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3593 if (dom)
3595 unsigned i;
3596 bitmap_iterator bi;
3597 bitmap_set_t newset;
3599 /* First, update the AVAIL_OUT set with anything we may have
3600 inserted higher up in the dominator tree. */
3601 newset = NEW_SETS (dom);
3602 if (newset)
3604 /* Note that we need to value_replace both NEW_SETS and
3605 AVAIL_OUT. In both sets the value may be represented by
3606 some non-simple expression that we want to replace
3607 here. */
3608 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3610 pre_expr expr = expression_for_id (i);
3611 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3612 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3616 /* Insert expressions for partial redundancies. */
3617 if (do_pre && !single_pred_p (block))
3619 new_stuff |= do_pre_regular_insertion (block, dom);
3620 if (do_partial_partial)
3621 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3624 /* Insert expressions for hoisting. */
3625 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3626 new_stuff |= do_hoist_insertion (block);
3629 for (son = first_dom_son (CDI_DOMINATORS, block);
3630 son;
3631 son = next_dom_son (CDI_DOMINATORS, son))
3633 new_stuff |= insert_aux (son, do_pre, do_hoist);
3636 return new_stuff;
3639 /* Perform insertion of partially redundant and hoistable values. */
3641 static void
3642 insert (void)
3644 bool new_stuff = true;
3645 basic_block bb;
3646 int num_iterations = 0;
3648 FOR_ALL_BB_FN (bb, cfun)
3649 NEW_SETS (bb) = bitmap_set_new ();
3651 while (new_stuff)
3653 num_iterations++;
3654 if (dump_file && dump_flags & TDF_DETAILS)
3655 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3656 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3657 flag_code_hoisting);
3659 /* Clear the NEW sets before the next iteration. We have already
3660 fully propagated its contents. */
3661 if (new_stuff)
3662 FOR_ALL_BB_FN (bb, cfun)
3663 bitmap_set_free (NEW_SETS (bb));
3665 statistics_histogram_event (cfun, "insert iterations", num_iterations);
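/* A minimal sketch (illustration only; names are invented stand-ins,
   assumes <stdint.h>) of the AVAIL equations below, walking a
   dominator tree whose children lists end with a -1 sentinel.  */
#if 0
enum { MAX_KIDS = 8 };

static void
avail_sketch (int bb, const int kids[][MAX_KIDS],
              const uint64_t phi_gen[], const uint64_t tmp_gen[],
              uint64_t avail_out[])
{
  /* AVAIL_OUT[BB] = AVAIL_IN[BB] U PHI_GEN[BB] U TMP_GEN[BB].  */
  avail_out[bb] |= phi_gen[bb] | tmp_gen[bb];
  for (int i = 0; i < MAX_KIDS && kids[bb][i] != -1; i++)
    {
      /* AVAIL_IN[kid] = AVAIL_OUT[dom (kid)] = AVAIL_OUT[BB].  */
      avail_out[kids[bb][i]] = avail_out[bb];
      avail_sketch (kids[bb][i], kids, phi_gen, tmp_gen, avail_out);
    }
}
#endif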
3669 /* Compute the AVAIL set for all basic blocks.
3671 This function performs value numbering of the statements in each basic
3672 block. The AVAIL sets are built from information we glean while doing
3673 this value numbering, since the AVAIL sets contain only one entry per
3674 value.
3676 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3677 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3679 static void
3680 compute_avail (void)
3683 basic_block block, son;
3684 basic_block *worklist;
3685 size_t sp = 0;
3686 unsigned i;
3687 tree name;
3689 /* We pretend that default definitions are defined in the entry block.
3690 This includes function arguments and the static chain decl. */
3691 FOR_EACH_SSA_NAME (i, name, cfun)
3693 pre_expr e;
3694 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3695 || has_zero_uses (name)
3696 || virtual_operand_p (name))
3697 continue;
3699 e = get_or_alloc_expr_for_name (name);
3700 add_to_value (get_expr_value_id (e), e);
3701 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3702 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3706 if (dump_file && (dump_flags & TDF_DETAILS))
3708 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3709 "tmp_gen", ENTRY_BLOCK);
3710 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3711 "avail_out", ENTRY_BLOCK);
3714 /* Allocate the worklist. */
3715 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3717 /* Seed the algorithm by putting the dominator children of the entry
3718 block on the worklist. */
3719 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3720 son;
3721 son = next_dom_son (CDI_DOMINATORS, son))
3722 worklist[sp++] = son;
3724 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3725 = ssa_default_def (cfun, gimple_vop (cfun));
3727 /* Loop until the worklist is empty. */
3728 while (sp)
3730 gimple *stmt;
3731 basic_block dom;
3733 /* Pick a block from the worklist. */
3734 block = worklist[--sp];
3736 /* Initially, the set of available values in BLOCK is that of
3737 its immediate dominator. */
3738 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3739 if (dom)
3741 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3742 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3745 /* Generate values for PHI nodes. */
3746 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3747 gsi_next (&gsi))
3749 tree result = gimple_phi_result (gsi.phi ());
3751 /* We have no need for virtual phis, as they don't represent
3752 actual computations. */
3753 if (virtual_operand_p (result))
3755 BB_LIVE_VOP_ON_EXIT (block) = result;
3756 continue;
3759 pre_expr e = get_or_alloc_expr_for_name (result);
3760 add_to_value (get_expr_value_id (e), e);
3761 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3762 bitmap_insert_into_set (PHI_GEN (block), e);
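/* PHI results land in PHI_GEN and AVAIL_OUT but not in EXP_GEN: a PHI
   merges values rather than computing one, so it is not an expression
   insertion could recreate elsewhere. */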
3765 BB_MAY_NOTRETURN (block) = 0;
3767 /* Now compute value numbers and populate value sets with all
3768 the expressions computed in BLOCK. */
3769 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3770 gsi_next (&gsi))
3772 ssa_op_iter iter;
3773 tree op;
3775 stmt = gsi_stmt (gsi);
3777 /* Cache whether the basic-block has any non-visible side-effect
3778 or control flow.
3779 If this isn't a call, or it is the last stmt in the
3780 basic-block, then the CFG represents things correctly. */
3781 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3783 /* Non-looping const functions always return normally.
3784 Otherwise the call might not return, or might have side-effects
3785 that forbid hoisting possibly trapping expressions
3786 before it. */
3787 int flags = gimple_call_flags (stmt);
3788 if (!(flags & ECF_CONST)
3789 || (flags & ECF_LOOPING_CONST_OR_PURE))
3790 BB_MAY_NOTRETURN (block) = 1;
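/* E.g. a looping-const call may never return; anything after it that
   could trap must not be evaluated ahead of the call. */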
3793 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3795 pre_expr e = get_or_alloc_expr_for_name (op);
3797 add_to_value (get_expr_value_id (e), e);
3798 bitmap_insert_into_set (TMP_GEN (block), e);
3799 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
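/* TMP_GEN thus collects the SSA names defined in this block; in
   GVN-PRE these are subtracted when computing ANTIC, as a value
   defined here cannot be anticipated above the block. */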
3802 if (gimple_vdef (stmt))
3803 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3805 if (gimple_has_side_effects (stmt)
3806 || stmt_could_throw_p (stmt)
3807 || is_gimple_debug (stmt))
3808 continue;
3810 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3812 if (ssa_undefined_value_p (op))
3813 continue;
3814 pre_expr e = get_or_alloc_expr_for_name (op);
3815 bitmap_value_insert_into_set (EXP_GEN (block), e);
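/* The SSA names used here are themselves trivial expressions occurring
   in this block, hence they too belong in EXP_GEN; undefined values
   were skipped since anticipating them is pointless. */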
3818 switch (gimple_code (stmt))
3820 case GIMPLE_RETURN:
3821 continue;
3823 case GIMPLE_CALL:
3825 vn_reference_t ref;
3826 vn_reference_s ref1;
3827 pre_expr result = NULL;
3829 /* We can value number only calls to real functions. */
3830 if (gimple_call_internal_p (stmt))
3831 continue;
3833 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3834 if (!ref)
3835 continue;
3837 /* If the value of the call is not invalidated in
3838 this block until it is computed, add the expression
3839 to EXP_GEN. */
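/* That is: no VUSE at all, a VUSE defined by a PHI (i.e. at block
   entry), or a VUSE defined in another block all mean no store in
   this block precedes the call. */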
3840 if (!gimple_vuse (stmt)
3841 || gimple_code
3842 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3843 || gimple_bb (SSA_NAME_DEF_STMT
3844 (gimple_vuse (stmt))) != block)
3846 result = pre_expr_pool.allocate ();
3847 result->kind = REFERENCE;
3848 result->id = 0;
3849 PRE_EXPR_REFERENCE (result) = ref;
3851 get_or_alloc_expression_id (result);
3852 add_to_value (get_expr_value_id (result), result);
3853 bitmap_value_insert_into_set (EXP_GEN (block), result);
3855 continue;
3858 case GIMPLE_ASSIGN:
3860 pre_expr result = NULL;
3861 switch (vn_get_stmt_kind (stmt))
3863 case VN_NARY:
3865 enum tree_code code = gimple_assign_rhs_code (stmt);
3866 vn_nary_op_t nary;
3868 /* COND_EXPR and VEC_COND_EXPR are awkward in
3869 that they contain an embedded complex expression.
3870 Don't even try to shove those through PRE. */
3871 if (code == COND_EXPR
3872 || code == VEC_COND_EXPR)
3873 continue;
3875 vn_nary_op_lookup_stmt (stmt, &nary);
3876 if (!nary)
3877 continue;
3879 /* If the NARY traps and there was a preceding
3880 point in the block that might not return, avoid
3881 adding the nary to EXP_GEN. */
3882 if (BB_MAY_NOTRETURN (block)
3883 && vn_nary_may_trap (nary))
3884 continue;
3886 result = pre_expr_pool.allocate ();
3887 result->kind = NARY;
3888 result->id = 0;
3889 PRE_EXPR_NARY (result) = nary;
3890 break;
3893 case VN_REFERENCE:
3895 tree rhs1 = gimple_assign_rhs1 (stmt);
3896 alias_set_type set = get_alias_set (rhs1);
3897 vec<vn_reference_op_s> operands
3898 = vn_reference_operands_for_lookup (rhs1);
3899 vn_reference_t ref;
3900 vn_reference_lookup_pieces (gimple_vuse (stmt), set,
3901 TREE_TYPE (rhs1),
3902 operands, &ref, VN_WALK);
3903 if (!ref)
3905 operands.release ();
3906 continue;
3909 /* If the value of the reference is not invalidated in
3910 this block until it is computed, add the expression
3911 to EXP_GEN. */
3912 if (gimple_vuse (stmt))
3914 gimple *def_stmt;
3915 bool ok = true;
3916 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3917 while (!gimple_nop_p (def_stmt)
3918 && gimple_code (def_stmt) != GIMPLE_PHI
3919 && gimple_bb (def_stmt) == block)
3921 if (stmt_may_clobber_ref_p
3922 (def_stmt, gimple_assign_rhs1 (stmt)))
3924 ok = false;
3925 break;
3927 def_stmt
3928 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3930 if (!ok)
3932 operands.release ();
3933 continue;
3937 /* If the load was value-numbered to another
3938 load, make sure we do not use its expression
3939 for insertion if it wouldn't be a valid
3940 replacement. */
3941 /* At the moment we have a testcase
3942 for hoist insertion of aligned vs. misaligned
3943 variants in gcc.dg/torture/pr65270-1.c; thus,
3944 with just alignment to be considered, we can
3945 simply replace the expression in the hashtable
3946 with the most conservative one. */
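/* Here ref1 walks the operands of the hashtable entry and ref2 those
   of this statement's lookup; if the recorded type is more strictly
   aligned than the one seen here, weaken it to the lesser alignment. */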
3947 vn_reference_op_t ref1 = &ref->operands.last ();
3948 while (ref1->opcode != TARGET_MEM_REF
3949 && ref1->opcode != MEM_REF
3950 && ref1 != &ref->operands[0])
3951 --ref1;
3952 vn_reference_op_t ref2 = &operands.last ();
3953 while (ref2->opcode != TARGET_MEM_REF
3954 && ref2->opcode != MEM_REF
3955 && ref2 != &operands[0])
3956 --ref2;
3957 if ((ref1->opcode == TARGET_MEM_REF
3958 || ref1->opcode == MEM_REF)
3959 && (TYPE_ALIGN (ref1->type)
3960 > TYPE_ALIGN (ref2->type)))
3961 ref1->type
3962 = build_aligned_type (ref1->type,
3963 TYPE_ALIGN (ref2->type));
3964 /* TBAA behavior is an obvious part of the value, so make
3965 sure that the hashtable expression covers this as well,
3966 by adjusting the ref's alias set and its base. */
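/* (For a MEM_REF the TBAA pointer type is carried by the type of the
   constant offset operand -- op0 here, op2 for a TARGET_MEM_REF --
   hence the wide_int_to_tree calls below re-create that constant with
   a type matching the chosen alias set.) */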
3967 if (ref->set == set
3968 || alias_set_subset_of (set, ref->set))
3969 ;
3970 else if (alias_set_subset_of (ref->set, set))
3972 ref->set = set;
3973 if (ref1->opcode == MEM_REF)
3974 ref1->op0
3975 = wide_int_to_tree (TREE_TYPE (ref2->op0),
3976 wi::to_wide (ref1->op0));
3977 else
3978 ref1->op2
3979 = wide_int_to_tree (TREE_TYPE (ref2->op2),
3980 wi::to_wide (ref1->op2));
3982 else
3984 ref->set = 0;
3985 if (ref1->opcode == MEM_REF)
3986 ref1->op0
3987 = wide_int_to_tree (ptr_type_node,
3988 wi::to_wide (ref1->op0));
3989 else
3990 ref1->op2
3991 = wide_int_to_tree (ptr_type_node,
3992 wi::to_wide (ref1->op2));
3994 operands.release ();
3996 result = pre_expr_pool.allocate ();
3997 result->kind = REFERENCE;
3998 result->id = 0;
3999 PRE_EXPR_REFERENCE (result) = ref;
4000 break;
4003 default:
4004 continue;
4007 get_or_alloc_expression_id (result);
4008 add_to_value (get_expr_value_id (result), result);
4009 bitmap_value_insert_into_set (EXP_GEN (block), result);
4010 continue;
4012 default:
4013 break;
4017 if (dump_file && (dump_flags & TDF_DETAILS))
4019 print_bitmap_set (dump_file, EXP_GEN (block),
4020 "exp_gen", block->index);
4021 print_bitmap_set (dump_file, PHI_GEN (block),
4022 "phi_gen", block->index);
4023 print_bitmap_set (dump_file, TMP_GEN (block),
4024 "tmp_gen", block->index);
4025 print_bitmap_set (dump_file, AVAIL_OUT (block),
4026 "avail_out", block->index);
4029 /* Put the dominator children of BLOCK on the worklist of blocks
4030 to compute available sets for. */
4031 for (son = first_dom_son (CDI_DOMINATORS, block);
4032 son;
4033 son = next_dom_son (CDI_DOMINATORS, son))
4034 worklist[sp++] = son;
4037 free (worklist);
4041 /* Initialize data structures used by PRE. */
4043 static void
4044 init_pre (void)
4046 basic_block bb;
4048 next_expression_id = 1;
4049 expressions.create (0);
4050 expressions.safe_push (NULL);
4051 value_expressions.create (get_max_value_id () + 1);
4052 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4053 name_to_id.create (0);
4055 inserted_exprs = BITMAP_ALLOC (NULL);
4057 connect_infinite_loops_to_exit ();
4058 memset (&pre_stats, 0, sizeof (pre_stats));
4060 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4062 calculate_dominance_info (CDI_DOMINATORS);
4064 bitmap_obstack_initialize (&grand_bitmap_obstack);
4065 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4066 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4067 FOR_ALL_BB_FN (bb, cfun)
4069 EXP_GEN (bb) = bitmap_set_new ();
4070 PHI_GEN (bb) = bitmap_set_new ();
4071 TMP_GEN (bb) = bitmap_set_new ();
4072 AVAIL_OUT (bb) = bitmap_set_new ();
4077 /* Deallocate data structures used by PRE. */
4079 static void
4080 fini_pre ()
4082 value_expressions.release ();
4083 expressions.release ();
4084 BITMAP_FREE (inserted_exprs);
4085 bitmap_obstack_release (&grand_bitmap_obstack);
4086 bitmap_set_pool.release ();
4087 pre_expr_pool.release ();
4088 delete phi_translate_table;
4089 phi_translate_table = NULL;
4090 delete expression_to_id;
4091 expression_to_id = NULL;
4092 name_to_id.release ();
4094 free_aux_for_blocks ();
4097 namespace {
4099 const pass_data pass_data_pre =
4101 GIMPLE_PASS, /* type */
4102 "pre", /* name */
4103 OPTGROUP_NONE, /* optinfo_flags */
4104 TV_TREE_PRE, /* tv_id */
4105 ( PROP_cfg | PROP_ssa ), /* properties_required */
4106 0, /* properties_provided */
4107 0, /* properties_destroyed */
4108 TODO_rebuild_alias, /* todo_flags_start */
4109 0, /* todo_flags_finish */
4112 class pass_pre : public gimple_opt_pass
4114 public:
4115 pass_pre (gcc::context *ctxt)
4116 : gimple_opt_pass (pass_data_pre, ctxt)
4119 /* opt_pass methods: */
4120 virtual bool gate (function *)
4121 { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
4122 virtual unsigned int execute (function *);
4124 }; // class pass_pre
4126 unsigned int
4127 pass_pre::execute (function *fun)
4129 unsigned int todo = 0;
4131 do_partial_partial =
4132 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4134 /* This has to happen before SCCVN runs because
4135 loop_optimizer_init may create new phis, etc. */
4136 loop_optimizer_init (LOOPS_NORMAL);
4137 split_critical_edges ();
4138 scev_initialize ();
4140 run_scc_vn (VN_WALK);
4142 init_pre ();
4144 /* Insert can get quite slow on an incredibly large number of basic
4145 blocks due to some quadratic behavior. Until this behavior is
4146 fixed, don't run it when we have an incredibly large number of
4147 basic blocks. If we aren't going to run insert, there is no point
4148 in computing ANTIC either, even though it's plenty fast, nor do
4149 we require AVAIL. */
4150 if (n_basic_blocks_for_fn (fun) < 4000)
4152 compute_avail ();
4153 compute_antic ();
4154 insert ();
4157 /* Make sure to remove fake edges before committing our inserts.
4158 This makes sure we don't end up with extra critical edges that
4159 we would need to split. */
4160 remove_fake_exit_edges ();
4161 gsi_commit_edge_inserts ();
4163 /* Elimination folds statements, which might (though it should
4164 not...) end up not keeping virtual operands up-to-date. */
4165 gcc_assert (!need_ssa_update_p (fun));
4167 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4168 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4169 statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
4170 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4172 /* Remove all the redundant expressions. */
4173 todo |= vn_eliminate (inserted_exprs);
4175 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4176 to insert PHI nodes sometimes, and because value numbering of casts isn't
4177 perfect, we sometimes end up inserting dead code. This simple DCE-like
4178 pass removes any insertions we made that weren't actually used. */
4179 simple_dce_from_worklist (inserted_exprs);
4181 fini_pre ();
4183 scev_finalize ();
4184 loop_optimizer_finalize ();
4186 /* Restore SSA info before tail-merging as that resets it as well. */
4187 scc_vn_restore_ssa_info ();
4189 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4190 case we can merge the block with the remaining predecessor of the block.
4191 It should either:
4192 - call merge_blocks after each tail merge iteration
4193 - call merge_blocks after all tail merge iterations
4194 - mark TODO_cleanup_cfg when necessary
4195 - share the cfg cleanup with fini_pre. */
4196 todo |= tail_merge_optimize (todo);
4198 free_scc_vn ();
4200 /* Tail merging invalidates the virtual SSA web; together with
4201 the cfg-cleanup opportunities exposed by PRE, this will wreck
4202 the SSA updating machinery. So make sure to run update-ssa
4203 manually, before eventually scheduling cfg-cleanup as part of
4204 the todo. */
4205 update_ssa (TODO_update_ssa_only_virtuals);
4207 return todo;
4210 } // anon namespace
4212 gimple_opt_pass *
4213 make_pass_pre (gcc::context *ctxt)
4215 return new pass_pre (ctxt);