* doc/generic.texi (ANNOTATE_EXPR): Document 3rd operand.
[official-gcc.git] / gcc / tree-ssa-pre.c
bloba9dcd5edd50ecfb6d298e5af844aaab55759df35
1 /* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "alloc-pool.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "fold-const.h"
36 #include "cfganal.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-ssa.h"
45 #include "cfgloop.h"
46 #include "tree-ssa-sccvn.h"
47 #include "tree-scalar-evolution.h"
48 #include "params.h"
49 #include "dbgcnt.h"
50 #include "domwalk.h"
51 #include "tree-ssa-propagate.h"
52 #include "tree-cfgcleanup.h"
53 #include "alias.h"
55 /* Even though this file is called tree-ssa-pre.c, we actually
56 implement a bit more than just PRE here. All of them piggy-back
57 on GVN which is implemented in tree-ssa-sccvn.c.
59 1. Full Redundancy Elimination (FRE)
60 This is the elimination phase of GVN.
62 2. Partial Redundancy Elimination (PRE)
63 This is adds computation of AVAIL_OUT and ANTIC_IN and
64 doing expression insertion to form GVN-PRE.
66 3. Code hoisting
67 This optimization uses the ANTIC_IN sets computed for PRE
68 to move expressions further up than PRE would do, to make
69 multiple computations of the same value fully redundant.
70 This pass is explained below (after the explanation of the
71 basic algorithm for PRE).
74 /* TODO:
76 1. Avail sets can be shared by making an avail_find_leader that
77 walks up the dominator tree and looks in those avail sets.
78 This might affect code optimality, it's unclear right now.
79 Currently the AVAIL_OUT sets are the remaining quadraticness in
80 memory of GVN-PRE.
81 2. Strength reduction can be performed by anticipating expressions
82 we can repair later on.
83 3. We can do back-substitution or smarter value numbering to catch
84 commutative expressions split up over multiple statements.
87 /* For ease of terminology, "expression node" in the below refers to
88 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
89 represent the actual statement containing the expressions we care about,
90 and we cache the value number by putting it in the expression. */
92 /* Basic algorithm for Partial Redundancy Elimination:
94 First we walk the statements to generate the AVAIL sets, the
95 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
96 generation of values/expressions by a given block. We use them
97 when computing the ANTIC sets. The AVAIL sets consist of
98 SSA_NAME's that represent values, so we know what values are
99 available in what blocks. AVAIL is a forward dataflow problem. In
100 SSA, values are never killed, so we don't need a kill set, or a
101 fixpoint iteration, in order to calculate the AVAIL sets. In
102 traditional parlance, AVAIL sets tell us the downsafety of the
103 expressions/values.
105 Next, we generate the ANTIC sets. These sets represent the
106 anticipatable expressions. ANTIC is a backwards dataflow
107 problem. An expression is anticipatable in a given block if it could
108 be generated in that block. This means that if we had to perform
109 an insertion in that block, of the value of that expression, we
110 could. Calculating the ANTIC sets requires phi translation of
111 expressions, because the flow goes backwards through phis. We must
112 iterate to a fixpoint of the ANTIC sets, because we have a kill
113 set. Even in SSA form, values are not live over the entire
114 function, only from their definition point onwards. So we have to
115 remove values from the ANTIC set once we go past the definition
116 point of the leaders that make them up.
117 compute_antic/compute_antic_aux performs this computation.
119 Third, we perform insertions to make partially redundant
120 expressions fully redundant.
122 An expression is partially redundant (excluding partial
123 anticipation) if:
125 1. It is AVAIL in some, but not all, of the predecessors of a
126 given block.
127 2. It is ANTIC in all the predecessors.
129 In order to make it fully redundant, we insert the expression into
130 the predecessors where it is not available, but is ANTIC.
132 When optimizing for size, we only eliminate the partial redundancy
133 if we need to insert in only one predecessor. This avoids almost
134 completely the code size increase that PRE usually causes.
136 For the partial anticipation case, we only perform insertion if it
137 is partially anticipated in some block, and fully available in all
138 of the predecessors.
140 do_pre_regular_insertion/do_pre_partial_partial_insertion
141 performs these steps, driven by insert/insert_aux.
143 Fourth, we eliminate fully redundant expressions.
144 This is a simple statement walk that replaces redundant
145 calculations with the now available values. */
147 /* Basic algorithm for Code Hoisting:
149 Code hoisting is: Moving value computations up in the control flow
150 graph to make multiple copies redundant. Typically this is a size
151 optimization, but there are cases where it also is helpful for speed.
153 A simple code hoisting algorithm is implemented that piggy-backs on
154 the PRE infrastructure. For code hoisting, we have to know ANTIC_OUT
155 which is effectively ANTIC_IN - AVAIL_OUT. The latter two have to be
156 computed for PRE, and we can use them to perform a limited version of
157 code hoisting, too.
159 For the purpose of this implementation, a value is hoistable to a basic
160 block B if the following properties are met:
162 1. The value is in ANTIC_IN(B) -- the value will be computed on all
163 paths from B to function exit and it can be computed in B);
165 2. The value is not in AVAIL_OUT(B) -- there would be no need to
166 compute the value again and make it available twice;
168 3. All successors of B are dominated by B -- makes sure that inserting
169 a computation of the value in B will make the remaining
170 computations fully redundant;
172 4. At least one successor has the value in AVAIL_OUT -- to avoid
173 hoisting values up too far;
175 5. There are at least two successors of B -- hoisting in straight
176 line code is pointless.
178 The third condition is not strictly necessary, but it would complicate
179 the hoisting pass a lot. In fact, I don't know of any code hoisting
180 algorithm that does not have this requirement. Fortunately, experiments
181 have show that most candidate hoistable values are in regions that meet
182 this condition (e.g. diamond-shape regions).
184 The forth condition is necessary to avoid hoisting things up too far
185 away from the uses of the value. Nothing else limits the algorithm
186 from hoisting everything up as far as ANTIC_IN allows. Experiments
187 with SPEC and CSiBE have shown that hoisting up too far results in more
188 spilling, less benefits for code size, and worse benchmark scores.
189 Fortunately, in practice most of the interesting hoisting opportunities
190 are caught despite this limitation.
192 For hoistable values that meet all conditions, expressions are inserted
193 to make the calculation of the hoistable value fully redundant. We
194 perform code hoisting insertions after each round of PRE insertions,
195 because code hoisting never exposes new PRE opportunities, but PRE can
196 create new code hoisting opportunities.
198 The code hoisting algorithm is implemented in do_hoist_insert, driven
199 by insert/insert_aux. */
201 /* Representations of value numbers:
203 Value numbers are represented by a representative SSA_NAME. We
204 will create fake SSA_NAME's in situations where we need a
205 representative but do not have one (because it is a complex
206 expression). In order to facilitate storing the value numbers in
207 bitmaps, and keep the number of wasted SSA_NAME's down, we also
208 associate a value_id with each value number, and create full blown
209 ssa_name's only where we actually need them (IE in operands of
210 existing expressions).
212 Theoretically you could replace all the value_id's with
213 SSA_NAME_VERSION, but this would allocate a large number of
214 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
215 It would also require an additional indirection at each point we
216 use the value id. */
218 /* Representation of expressions on value numbers:
220 Expressions consisting of value numbers are represented the same
221 way as our VN internally represents them, with an additional
222 "pre_expr" wrapping around them in order to facilitate storing all
223 of the expressions in the same sets. */
225 /* Representation of sets:
227 The dataflow sets do not need to be sorted in any particular order
228 for the majority of their lifetime, are simply represented as two
229 bitmaps, one that keeps track of values present in the set, and one
230 that keeps track of expressions present in the set.
232 When we need them in topological order, we produce it on demand by
233 transforming the bitmap into an array and sorting it into topo
234 order. */
236 /* Type of expression, used to know which member of the PRE_EXPR union
237 is valid. */
239 enum pre_expr_kind
241 NAME,
242 NARY,
243 REFERENCE,
244 CONSTANT
247 union pre_expr_union
249 tree name;
250 tree constant;
251 vn_nary_op_t nary;
252 vn_reference_t reference;
255 typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
257 enum pre_expr_kind kind;
258 unsigned int id;
259 pre_expr_union u;
261 /* hash_table support. */
262 static inline hashval_t hash (const pre_expr_d *);
263 static inline int equal (const pre_expr_d *, const pre_expr_d *);
264 } *pre_expr;
266 #define PRE_EXPR_NAME(e) (e)->u.name
267 #define PRE_EXPR_NARY(e) (e)->u.nary
268 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
269 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
271 /* Compare E1 and E1 for equality. */
273 inline int
274 pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
276 if (e1->kind != e2->kind)
277 return false;
279 switch (e1->kind)
281 case CONSTANT:
282 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
283 PRE_EXPR_CONSTANT (e2));
284 case NAME:
285 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
286 case NARY:
287 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
288 case REFERENCE:
289 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
290 PRE_EXPR_REFERENCE (e2));
291 default:
292 gcc_unreachable ();
296 /* Hash E. */
298 inline hashval_t
299 pre_expr_d::hash (const pre_expr_d *e)
301 switch (e->kind)
303 case CONSTANT:
304 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
305 case NAME:
306 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
307 case NARY:
308 return PRE_EXPR_NARY (e)->hashcode;
309 case REFERENCE:
310 return PRE_EXPR_REFERENCE (e)->hashcode;
311 default:
312 gcc_unreachable ();
316 /* Next global expression id number. */
317 static unsigned int next_expression_id;
319 /* Mapping from expression to id number we can use in bitmap sets. */
320 static vec<pre_expr> expressions;
321 static hash_table<pre_expr_d> *expression_to_id;
322 static vec<unsigned> name_to_id;
324 /* Allocate an expression id for EXPR. */
326 static inline unsigned int
327 alloc_expression_id (pre_expr expr)
329 struct pre_expr_d **slot;
330 /* Make sure we won't overflow. */
331 gcc_assert (next_expression_id + 1 > next_expression_id);
332 expr->id = next_expression_id++;
333 expressions.safe_push (expr);
334 if (expr->kind == NAME)
336 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
337 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
338 re-allocations by using vec::reserve upfront. */
339 unsigned old_len = name_to_id.length ();
340 name_to_id.reserve (num_ssa_names - old_len);
341 name_to_id.quick_grow_cleared (num_ssa_names);
342 gcc_assert (name_to_id[version] == 0);
343 name_to_id[version] = expr->id;
345 else
347 slot = expression_to_id->find_slot (expr, INSERT);
348 gcc_assert (!*slot);
349 *slot = expr;
351 return next_expression_id - 1;
354 /* Return the expression id for tree EXPR. */
356 static inline unsigned int
357 get_expression_id (const pre_expr expr)
359 return expr->id;
362 static inline unsigned int
363 lookup_expression_id (const pre_expr expr)
365 struct pre_expr_d **slot;
367 if (expr->kind == NAME)
369 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
370 if (name_to_id.length () <= version)
371 return 0;
372 return name_to_id[version];
374 else
376 slot = expression_to_id->find_slot (expr, NO_INSERT);
377 if (!slot)
378 return 0;
379 return ((pre_expr)*slot)->id;
383 /* Return the existing expression id for EXPR, or create one if one
384 does not exist yet. */
386 static inline unsigned int
387 get_or_alloc_expression_id (pre_expr expr)
389 unsigned int id = lookup_expression_id (expr);
390 if (id == 0)
391 return alloc_expression_id (expr);
392 return expr->id = id;
395 /* Return the expression that has expression id ID */
397 static inline pre_expr
398 expression_for_id (unsigned int id)
400 return expressions[id];
403 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
405 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
407 static pre_expr
408 get_or_alloc_expr_for_name (tree name)
410 struct pre_expr_d expr;
411 pre_expr result;
412 unsigned int result_id;
414 expr.kind = NAME;
415 expr.id = 0;
416 PRE_EXPR_NAME (&expr) = name;
417 result_id = lookup_expression_id (&expr);
418 if (result_id != 0)
419 return expression_for_id (result_id);
421 result = pre_expr_pool.allocate ();
422 result->kind = NAME;
423 PRE_EXPR_NAME (result) = name;
424 alloc_expression_id (result);
425 return result;
428 /* An unordered bitmap set. One bitmap tracks values, the other,
429 expressions. */
430 typedef struct bitmap_set
432 bitmap_head expressions;
433 bitmap_head values;
434 } *bitmap_set_t;
436 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
437 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
439 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
440 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
442 /* Mapping from value id to expressions with that value_id. */
443 static vec<bitmap> value_expressions;
445 /* Sets that we need to keep track of. */
446 typedef struct bb_bitmap_sets
448 /* The EXP_GEN set, which represents expressions/values generated in
449 a basic block. */
450 bitmap_set_t exp_gen;
452 /* The PHI_GEN set, which represents PHI results generated in a
453 basic block. */
454 bitmap_set_t phi_gen;
456 /* The TMP_GEN set, which represents results/temporaries generated
457 in a basic block. IE the LHS of an expression. */
458 bitmap_set_t tmp_gen;
460 /* The AVAIL_OUT set, which represents which values are available in
461 a given basic block. */
462 bitmap_set_t avail_out;
464 /* The ANTIC_IN set, which represents which values are anticipatable
465 in a given basic block. */
466 bitmap_set_t antic_in;
468 /* The PA_IN set, which represents which values are
469 partially anticipatable in a given basic block. */
470 bitmap_set_t pa_in;
472 /* The NEW_SETS set, which is used during insertion to augment the
473 AVAIL_OUT set of blocks with the new insertions performed during
474 the current iteration. */
475 bitmap_set_t new_sets;
477 /* A cache for value_dies_in_block_x. */
478 bitmap expr_dies;
480 /* The live virtual operand on successor edges. */
481 tree vop_on_exit;
483 /* True if we have visited this block during ANTIC calculation. */
484 unsigned int visited : 1;
486 /* True when the block contains a call that might not return. */
487 unsigned int contains_may_not_return_call : 1;
488 } *bb_value_sets_t;
490 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
491 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
492 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
493 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
494 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
495 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
496 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
497 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
498 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
499 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
500 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
503 /* This structure is used to keep track of statistics on what
504 optimization PRE was able to perform. */
505 static struct
507 /* The number of new expressions/temporaries generated by PRE. */
508 int insertions;
510 /* The number of inserts found due to partial anticipation */
511 int pa_insert;
513 /* The number of inserts made for code hoisting. */
514 int hoist_insert;
516 /* The number of new PHI nodes added by PRE. */
517 int phis;
518 } pre_stats;
520 static bool do_partial_partial;
521 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
522 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
523 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
524 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
525 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
526 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
527 static bitmap_set_t bitmap_set_new (void);
528 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
529 tree);
530 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
531 static unsigned int get_expr_value_id (pre_expr);
533 /* We can add and remove elements and entries to and from sets
534 and hash tables, so we use alloc pools for them. */
536 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
537 static bitmap_obstack grand_bitmap_obstack;
539 /* A three tuple {e, pred, v} used to cache phi translations in the
540 phi_translate_table. */
542 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
544 /* The expression. */
545 pre_expr e;
547 /* The predecessor block along which we translated the expression. */
548 basic_block pred;
550 /* The value that resulted from the translation. */
551 pre_expr v;
553 /* The hashcode for the expression, pred pair. This is cached for
554 speed reasons. */
555 hashval_t hashcode;
557 /* hash_table support. */
558 static inline hashval_t hash (const expr_pred_trans_d *);
559 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
560 } *expr_pred_trans_t;
561 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
563 inline hashval_t
564 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
566 return e->hashcode;
569 inline int
570 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
571 const expr_pred_trans_d *ve2)
573 basic_block b1 = ve1->pred;
574 basic_block b2 = ve2->pred;
576 /* If they are not translations for the same basic block, they can't
577 be equal. */
578 if (b1 != b2)
579 return false;
580 return pre_expr_d::equal (ve1->e, ve2->e);
583 /* The phi_translate_table caches phi translations for a given
584 expression and predecessor. */
585 static hash_table<expr_pred_trans_d> *phi_translate_table;
587 /* Add the tuple mapping from {expression E, basic block PRED} to
588 the phi translation table and return whether it pre-existed. */
590 static inline bool
591 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
593 expr_pred_trans_t *slot;
594 expr_pred_trans_d tem;
595 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
596 pred->index);
597 tem.e = e;
598 tem.pred = pred;
599 tem.hashcode = hash;
600 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
601 if (*slot)
603 *entry = *slot;
604 return true;
607 *entry = *slot = XNEW (struct expr_pred_trans_d);
608 (*entry)->e = e;
609 (*entry)->pred = pred;
610 (*entry)->hashcode = hash;
611 return false;
615 /* Add expression E to the expression set of value id V. */
617 static void
618 add_to_value (unsigned int v, pre_expr e)
620 bitmap set;
622 gcc_checking_assert (get_expr_value_id (e) == v);
624 if (v >= value_expressions.length ())
626 value_expressions.safe_grow_cleared (v + 1);
629 set = value_expressions[v];
630 if (!set)
632 set = BITMAP_ALLOC (&grand_bitmap_obstack);
633 value_expressions[v] = set;
636 bitmap_set_bit (set, get_or_alloc_expression_id (e));
639 /* Create a new bitmap set and return it. */
641 static bitmap_set_t
642 bitmap_set_new (void)
644 bitmap_set_t ret = bitmap_set_pool.allocate ();
645 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
646 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
647 return ret;
650 /* Return the value id for a PRE expression EXPR. */
652 static unsigned int
653 get_expr_value_id (pre_expr expr)
655 unsigned int id;
656 switch (expr->kind)
658 case CONSTANT:
659 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
660 break;
661 case NAME:
662 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
663 break;
664 case NARY:
665 id = PRE_EXPR_NARY (expr)->value_id;
666 break;
667 case REFERENCE:
668 id = PRE_EXPR_REFERENCE (expr)->value_id;
669 break;
670 default:
671 gcc_unreachable ();
673 /* ??? We cannot assert that expr has a value-id (it can be 0), because
674 we assign value-ids only to expressions that have a result
675 in set_hashtable_value_ids. */
676 return id;
679 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
681 static tree
682 sccvn_valnum_from_value_id (unsigned int val)
684 bitmap_iterator bi;
685 unsigned int i;
686 bitmap exprset = value_expressions[val];
687 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
689 pre_expr vexpr = expression_for_id (i);
690 if (vexpr->kind == NAME)
691 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
692 else if (vexpr->kind == CONSTANT)
693 return PRE_EXPR_CONSTANT (vexpr);
695 return NULL_TREE;
698 /* Remove an expression EXPR from a bitmapped set. */
700 static void
701 bitmap_remove_expr_from_set (bitmap_set_t set, pre_expr expr)
703 unsigned int val = get_expr_value_id (expr);
704 bitmap_clear_bit (&set->values, val);
705 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
708 /* Insert an expression EXPR into a bitmapped set. */
710 static void
711 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
713 unsigned int val = get_expr_value_id (expr);
714 if (! value_id_constant_p (val))
716 /* Note this is the only function causing multiple expressions
717 for the same value to appear in a set. This is needed for
718 TMP_GEN, PHI_GEN and NEW_SETs. */
719 bitmap_set_bit (&set->values, val);
720 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
724 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
726 static void
727 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
729 bitmap_copy (&dest->expressions, &orig->expressions);
730 bitmap_copy (&dest->values, &orig->values);
734 /* Free memory used up by SET. */
735 static void
736 bitmap_set_free (bitmap_set_t set)
738 bitmap_clear (&set->expressions);
739 bitmap_clear (&set->values);
743 /* Generate an topological-ordered array of bitmap set SET. */
745 static vec<pre_expr>
746 sorted_array_from_bitmap_set (bitmap_set_t set)
748 unsigned int i, j;
749 bitmap_iterator bi, bj;
750 vec<pre_expr> result;
752 /* Pre-allocate enough space for the array. */
753 result.create (bitmap_count_bits (&set->expressions));
755 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
757 /* The number of expressions having a given value is usually
758 relatively small. Thus, rather than making a vector of all
759 the expressions and sorting it by value-id, we walk the values
760 and check in the reverse mapping that tells us what expressions
761 have a given value, to filter those in our set. As a result,
762 the expressions are inserted in value-id order, which means
763 topological order.
765 If this is somehow a significant lose for some cases, we can
766 choose which set to walk based on the set size. */
767 bitmap exprset = value_expressions[i];
768 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
770 if (bitmap_bit_p (&set->expressions, j))
771 result.quick_push (expression_for_id (j));
775 return result;
778 /* Subtract all expressions contained in ORIG from DEST. */
780 static bitmap_set_t
781 bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
783 bitmap_set_t result = bitmap_set_new ();
784 bitmap_iterator bi;
785 unsigned int i;
787 bitmap_and_compl (&result->expressions, &dest->expressions,
788 &orig->expressions);
790 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
792 pre_expr expr = expression_for_id (i);
793 unsigned int value_id = get_expr_value_id (expr);
794 bitmap_set_bit (&result->values, value_id);
797 return result;
800 /* Subtract all values in bitmap set B from bitmap set A. */
802 static void
803 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
805 unsigned int i;
806 bitmap_iterator bi;
807 pre_expr to_remove = NULL;
808 FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
810 if (to_remove)
812 bitmap_remove_expr_from_set (a, to_remove);
813 to_remove = NULL;
815 pre_expr expr = expression_for_id (i);
816 if (bitmap_bit_p (&b->values, get_expr_value_id (expr)))
817 to_remove = expr;
819 if (to_remove)
820 bitmap_remove_expr_from_set (a, to_remove);
824 /* Return true if bitmapped set SET contains the value VALUE_ID. */
826 static bool
827 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
829 if (value_id_constant_p (value_id))
830 return true;
832 return bitmap_bit_p (&set->values, value_id);
835 static inline bool
836 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
838 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
841 /* Return true if two bitmap sets are equal. */
843 static bool
844 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
846 return bitmap_equal_p (&a->values, &b->values);
849 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
850 and add it otherwise. */
852 static void
853 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
855 unsigned int val = get_expr_value_id (expr);
856 if (value_id_constant_p (val))
857 return;
859 if (bitmap_set_contains_value (set, val))
861 /* The number of expressions having a given value is usually
862 significantly less than the total number of expressions in SET.
863 Thus, rather than check, for each expression in SET, whether it
864 has the value LOOKFOR, we walk the reverse mapping that tells us
865 what expressions have a given value, and see if any of those
866 expressions are in our set. For large testcases, this is about
867 5-10x faster than walking the bitmap. If this is somehow a
868 significant lose for some cases, we can choose which set to walk
869 based on the set size. */
870 unsigned int i;
871 bitmap_iterator bi;
872 bitmap exprset = value_expressions[val];
873 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
875 if (bitmap_clear_bit (&set->expressions, i))
877 bitmap_set_bit (&set->expressions, get_expression_id (expr));
878 return;
881 gcc_unreachable ();
883 else
884 bitmap_insert_into_set (set, expr);
887 /* Insert EXPR into SET if EXPR's value is not already present in
888 SET. */
890 static void
891 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
893 unsigned int val = get_expr_value_id (expr);
895 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
897 /* Constant values are always considered to be part of the set. */
898 if (value_id_constant_p (val))
899 return;
901 /* If the value membership changed, add the expression. */
902 if (bitmap_set_bit (&set->values, val))
903 bitmap_set_bit (&set->expressions, expr->id);
906 /* Print out EXPR to outfile. */
908 static void
909 print_pre_expr (FILE *outfile, const pre_expr expr)
911 if (! expr)
913 fprintf (outfile, "NULL");
914 return;
916 switch (expr->kind)
918 case CONSTANT:
919 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
920 break;
921 case NAME:
922 print_generic_expr (outfile, PRE_EXPR_NAME (expr));
923 break;
924 case NARY:
926 unsigned int i;
927 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
928 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
929 for (i = 0; i < nary->length; i++)
931 print_generic_expr (outfile, nary->op[i]);
932 if (i != (unsigned) nary->length - 1)
933 fprintf (outfile, ",");
935 fprintf (outfile, "}");
937 break;
939 case REFERENCE:
941 vn_reference_op_t vro;
942 unsigned int i;
943 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
944 fprintf (outfile, "{");
945 for (i = 0;
946 ref->operands.iterate (i, &vro);
947 i++)
949 bool closebrace = false;
950 if (vro->opcode != SSA_NAME
951 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
953 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
954 if (vro->op0)
956 fprintf (outfile, "<");
957 closebrace = true;
960 if (vro->op0)
962 print_generic_expr (outfile, vro->op0);
963 if (vro->op1)
965 fprintf (outfile, ",");
966 print_generic_expr (outfile, vro->op1);
968 if (vro->op2)
970 fprintf (outfile, ",");
971 print_generic_expr (outfile, vro->op2);
974 if (closebrace)
975 fprintf (outfile, ">");
976 if (i != ref->operands.length () - 1)
977 fprintf (outfile, ",");
979 fprintf (outfile, "}");
980 if (ref->vuse)
982 fprintf (outfile, "@");
983 print_generic_expr (outfile, ref->vuse);
986 break;
989 void debug_pre_expr (pre_expr);
991 /* Like print_pre_expr but always prints to stderr. */
992 DEBUG_FUNCTION void
993 debug_pre_expr (pre_expr e)
995 print_pre_expr (stderr, e);
996 fprintf (stderr, "\n");
999 /* Print out SET to OUTFILE. */
1001 static void
1002 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1003 const char *setname, int blockindex)
1005 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1006 if (set)
1008 bool first = true;
1009 unsigned i;
1010 bitmap_iterator bi;
1012 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1014 const pre_expr expr = expression_for_id (i);
1016 if (!first)
1017 fprintf (outfile, ", ");
1018 first = false;
1019 print_pre_expr (outfile, expr);
1021 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1024 fprintf (outfile, " }\n");
1027 void debug_bitmap_set (bitmap_set_t);
1029 DEBUG_FUNCTION void
1030 debug_bitmap_set (bitmap_set_t set)
1032 print_bitmap_set (stderr, set, "debug", 0);
1035 void debug_bitmap_sets_for (basic_block);
1037 DEBUG_FUNCTION void
1038 debug_bitmap_sets_for (basic_block bb)
1040 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1041 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1042 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1043 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1044 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1045 if (do_partial_partial)
1046 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1047 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1050 /* Print out the expressions that have VAL to OUTFILE. */
1052 static void
1053 print_value_expressions (FILE *outfile, unsigned int val)
1055 bitmap set = value_expressions[val];
1056 if (set)
1058 bitmap_set x;
1059 char s[10];
1060 sprintf (s, "%04d", val);
1061 x.expressions = *set;
1062 print_bitmap_set (outfile, &x, s, 0);
1067 DEBUG_FUNCTION void
1068 debug_value_expressions (unsigned int val)
1070 print_value_expressions (stderr, val);
1073 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1074 represent it. */
1076 static pre_expr
1077 get_or_alloc_expr_for_constant (tree constant)
1079 unsigned int result_id;
1080 unsigned int value_id;
1081 struct pre_expr_d expr;
1082 pre_expr newexpr;
1084 expr.kind = CONSTANT;
1085 PRE_EXPR_CONSTANT (&expr) = constant;
1086 result_id = lookup_expression_id (&expr);
1087 if (result_id != 0)
1088 return expression_for_id (result_id);
1090 newexpr = pre_expr_pool.allocate ();
1091 newexpr->kind = CONSTANT;
1092 PRE_EXPR_CONSTANT (newexpr) = constant;
1093 alloc_expression_id (newexpr);
1094 value_id = get_or_alloc_constant_value_id (constant);
1095 add_to_value (value_id, newexpr);
1096 return newexpr;
1099 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1100 Currently only supports constants and SSA_NAMES. */
1101 static pre_expr
1102 get_or_alloc_expr_for (tree t)
1104 if (TREE_CODE (t) == SSA_NAME)
1105 return get_or_alloc_expr_for_name (t);
1106 else if (is_gimple_min_invariant (t))
1107 return get_or_alloc_expr_for_constant (t);
1108 gcc_unreachable ();
1111 /* Return the folded version of T if T, when folded, is a gimple
1112 min_invariant or an SSA name. Otherwise, return T. */
1114 static pre_expr
1115 fully_constant_expression (pre_expr e)
1117 switch (e->kind)
1119 case CONSTANT:
1120 return e;
1121 case NARY:
1123 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1124 tree res = vn_nary_simplify (nary);
1125 if (!res)
1126 return e;
1127 if (is_gimple_min_invariant (res))
1128 return get_or_alloc_expr_for_constant (res);
1129 if (TREE_CODE (res) == SSA_NAME)
1130 return get_or_alloc_expr_for_name (res);
1131 return e;
1133 case REFERENCE:
1135 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1136 tree folded;
1137 if ((folded = fully_constant_vn_reference_p (ref)))
1138 return get_or_alloc_expr_for_constant (folded);
1139 return e;
1141 default:
1142 return e;
1144 return e;
1147 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1148 it has the value it would have in BLOCK. Set *SAME_VALID to true
1149 in case the new vuse doesn't change the value id of the OPERANDS. */
1151 static tree
1152 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1153 alias_set_type set, tree type, tree vuse,
1154 basic_block phiblock,
1155 basic_block block, bool *same_valid)
1157 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1158 ao_ref ref;
1159 edge e = NULL;
1160 bool use_oracle;
1162 *same_valid = true;
1164 if (gimple_bb (phi) != phiblock)
1165 return vuse;
1167 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1169 /* Use the alias-oracle to find either the PHI node in this block,
1170 the first VUSE used in this block that is equivalent to vuse or
1171 the first VUSE which definition in this block kills the value. */
1172 if (gimple_code (phi) == GIMPLE_PHI)
1173 e = find_edge (block, phiblock);
1174 else if (use_oracle)
1175 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1177 vuse = gimple_vuse (phi);
1178 phi = SSA_NAME_DEF_STMT (vuse);
1179 if (gimple_bb (phi) != phiblock)
1180 return vuse;
1181 if (gimple_code (phi) == GIMPLE_PHI)
1183 e = find_edge (block, phiblock);
1184 break;
1187 else
1188 return NULL_TREE;
1190 if (e)
1192 if (use_oracle)
1194 bitmap visited = NULL;
1195 unsigned int cnt;
1196 /* Try to find a vuse that dominates this phi node by skipping
1197 non-clobbering statements. */
1198 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1199 NULL, NULL);
1200 if (visited)
1201 BITMAP_FREE (visited);
1203 else
1204 vuse = NULL_TREE;
1205 if (!vuse)
1207 /* If we didn't find any, the value ID can't stay the same,
1208 but return the translated vuse. */
1209 *same_valid = false;
1210 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1212 /* ??? We would like to return vuse here as this is the canonical
1213 upmost vdef that this reference is associated with. But during
1214 insertion of the references into the hash tables we only ever
1215 directly insert with their direct gimple_vuse, hence returning
1216 something else would make us not find the other expression. */
1217 return PHI_ARG_DEF (phi, e->dest_idx);
1220 return NULL_TREE;
1223 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1224 SET2 *or* SET3. This is used to avoid making a set consisting of the union
1225 of PA_IN and ANTIC_IN during insert and phi-translation. */
1227 static inline pre_expr
1228 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
1229 bitmap_set_t set3 = NULL)
1231 pre_expr result;
1233 result = bitmap_find_leader (set1, val);
1234 if (!result && set2)
1235 result = bitmap_find_leader (set2, val);
1236 if (!result && set3)
1237 result = bitmap_find_leader (set3, val);
1238 return result;
1241 /* Get the tree type for our PRE expression e. */
1243 static tree
1244 get_expr_type (const pre_expr e)
1246 switch (e->kind)
1248 case NAME:
1249 return TREE_TYPE (PRE_EXPR_NAME (e));
1250 case CONSTANT:
1251 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1252 case REFERENCE:
1253 return PRE_EXPR_REFERENCE (e)->type;
1254 case NARY:
1255 return PRE_EXPR_NARY (e)->type;
1257 gcc_unreachable ();
1260 /* Get a representative SSA_NAME for a given expression that is available in B.
1261 Since all of our sub-expressions are treated as values, we require
1262 them to be SSA_NAME's for simplicity.
1263 Prior versions of GVNPRE used to use "value handles" here, so that
1264 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1265 either case, the operands are really values (IE we do not expect
1266 them to be usable without finding leaders). */
1268 static tree
1269 get_representative_for (const pre_expr e, basic_block b = NULL)
1271 tree name, valnum = NULL_TREE;
1272 unsigned int value_id = get_expr_value_id (e);
1274 switch (e->kind)
1276 case NAME:
1277 return VN_INFO (PRE_EXPR_NAME (e))->valnum;
1278 case CONSTANT:
1279 return PRE_EXPR_CONSTANT (e);
1280 case NARY:
1281 case REFERENCE:
1283 /* Go through all of the expressions representing this value
1284 and pick out an SSA_NAME. */
1285 unsigned int i;
1286 bitmap_iterator bi;
1287 bitmap exprs = value_expressions[value_id];
1288 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1290 pre_expr rep = expression_for_id (i);
1291 if (rep->kind == NAME)
1293 tree name = PRE_EXPR_NAME (rep);
1294 valnum = VN_INFO (name)->valnum;
1295 gimple *def = SSA_NAME_DEF_STMT (name);
1296 /* We have to return either a new representative or one
1297 that can be used for expression simplification and thus
1298 is available in B. */
1299 if (! b
1300 || gimple_nop_p (def)
1301 || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
1302 return name;
1304 else if (rep->kind == CONSTANT)
1305 return PRE_EXPR_CONSTANT (rep);
1308 break;
1311 /* If we reached here we couldn't find an SSA_NAME. This can
1312 happen when we've discovered a value that has never appeared in
1313 the program as set to an SSA_NAME, as the result of phi translation.
1314 Create one here.
1315 ??? We should be able to re-use this when we insert the statement
1316 to compute it. */
1317 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1318 VN_INFO_GET (name)->value_id = value_id;
1319 VN_INFO (name)->valnum = valnum ? valnum : name;
1320 /* ??? For now mark this SSA name for release by SCCVN. */
1321 VN_INFO (name)->needs_insertion = true;
1322 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1323 if (dump_file && (dump_flags & TDF_DETAILS))
1325 fprintf (dump_file, "Created SSA_NAME representative ");
1326 print_generic_expr (dump_file, name);
1327 fprintf (dump_file, " for expression:");
1328 print_pre_expr (dump_file, e);
1329 fprintf (dump_file, " (%04d)\n", value_id);
1332 return name;
1336 static pre_expr
1337 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1338 basic_block pred, basic_block phiblock);
1340 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1341 the phis in PRED. Return NULL if we can't find a leader for each part
1342 of the translated expression. */
1344 static pre_expr
1345 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1346 basic_block pred, basic_block phiblock)
1348 switch (expr->kind)
1350 case NARY:
1352 unsigned int i;
1353 bool changed = false;
1354 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1355 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1356 sizeof_vn_nary_op (nary->length));
1357 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1359 for (i = 0; i < newnary->length; i++)
1361 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1362 continue;
1363 else
1365 pre_expr leader, result;
1366 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1367 leader = find_leader_in_sets (op_val_id, set1, set2);
1368 result = phi_translate (leader, set1, set2, pred, phiblock);
1369 if (result && result != leader)
1370 /* Force a leader as well as we are simplifying this
1371 expression. */
1372 newnary->op[i] = get_representative_for (result, pred);
1373 else if (!result)
1374 return NULL;
1376 changed |= newnary->op[i] != nary->op[i];
1379 if (changed)
1381 pre_expr constant;
1382 unsigned int new_val_id;
1384 PRE_EXPR_NARY (expr) = newnary;
1385 constant = fully_constant_expression (expr);
1386 PRE_EXPR_NARY (expr) = nary;
1387 if (constant != expr)
1389 /* For non-CONSTANTs we have to make sure we can eventually
1390 insert the expression. Which means we need to have a
1391 leader for it. */
1392 if (constant->kind != CONSTANT)
1394 /* Do not allow simplifications to non-constants over
1395 backedges as this will likely result in a loop PHI node
1396 to be inserted and increased register pressure.
1397 See PR77498 - this avoids doing predcoms work in
1398 a less efficient way. */
1399 if (find_edge (pred, phiblock)->flags & EDGE_DFS_BACK)
1401 else
1403 unsigned value_id = get_expr_value_id (constant);
1404 constant = find_leader_in_sets (value_id, set1, set2,
1405 AVAIL_OUT (pred));
1406 if (constant)
1407 return constant;
1410 else
1411 return constant;
1414 /* vn_nary_* do not valueize operands. */
1415 for (i = 0; i < newnary->length; ++i)
1416 if (TREE_CODE (newnary->op[i]) == SSA_NAME)
1417 newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
1418 tree result = vn_nary_op_lookup_pieces (newnary->length,
1419 newnary->opcode,
1420 newnary->type,
1421 &newnary->op[0],
1422 &nary);
1423 if (result && is_gimple_min_invariant (result))
1424 return get_or_alloc_expr_for_constant (result);
1426 expr = pre_expr_pool.allocate ();
1427 expr->kind = NARY;
1428 expr->id = 0;
1429 if (nary)
1431 PRE_EXPR_NARY (expr) = nary;
1432 new_val_id = nary->value_id;
1433 get_or_alloc_expression_id (expr);
1435 else
1437 new_val_id = get_next_value_id ();
1438 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1439 nary = vn_nary_op_insert_pieces (newnary->length,
1440 newnary->opcode,
1441 newnary->type,
1442 &newnary->op[0],
1443 result, new_val_id);
1444 PRE_EXPR_NARY (expr) = nary;
1445 get_or_alloc_expression_id (expr);
1447 add_to_value (new_val_id, expr);
1449 return expr;
1451 break;
1453 case REFERENCE:
1455 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1456 vec<vn_reference_op_s> operands = ref->operands;
1457 tree vuse = ref->vuse;
1458 tree newvuse = vuse;
1459 vec<vn_reference_op_s> newoperands = vNULL;
1460 bool changed = false, same_valid = true;
1461 unsigned int i, n;
1462 vn_reference_op_t operand;
1463 vn_reference_t newref;
1465 for (i = 0; operands.iterate (i, &operand); i++)
1467 pre_expr opresult;
1468 pre_expr leader;
1469 tree op[3];
1470 tree type = operand->type;
1471 vn_reference_op_s newop = *operand;
1472 op[0] = operand->op0;
1473 op[1] = operand->op1;
1474 op[2] = operand->op2;
1475 for (n = 0; n < 3; ++n)
1477 unsigned int op_val_id;
1478 if (!op[n])
1479 continue;
1480 if (TREE_CODE (op[n]) != SSA_NAME)
1482 /* We can't possibly insert these. */
1483 if (n != 0
1484 && !is_gimple_min_invariant (op[n]))
1485 break;
1486 continue;
1488 op_val_id = VN_INFO (op[n])->value_id;
1489 leader = find_leader_in_sets (op_val_id, set1, set2);
1490 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1491 if (opresult && opresult != leader)
1493 tree name = get_representative_for (opresult);
1494 changed |= name != op[n];
1495 op[n] = name;
1497 else if (!opresult)
1498 break;
1500 if (n != 3)
1502 newoperands.release ();
1503 return NULL;
1505 if (!changed)
1506 continue;
1507 if (!newoperands.exists ())
1508 newoperands = operands.copy ();
1509 /* We may have changed from an SSA_NAME to a constant */
1510 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1511 newop.opcode = TREE_CODE (op[0]);
1512 newop.type = type;
1513 newop.op0 = op[0];
1514 newop.op1 = op[1];
1515 newop.op2 = op[2];
1516 newoperands[i] = newop;
1518 gcc_checking_assert (i == operands.length ());
1520 if (vuse)
1522 newvuse = translate_vuse_through_block (newoperands.exists ()
1523 ? newoperands : operands,
1524 ref->set, ref->type,
1525 vuse, phiblock, pred,
1526 &same_valid);
1527 if (newvuse == NULL_TREE)
1529 newoperands.release ();
1530 return NULL;
1534 if (changed || newvuse != vuse)
1536 unsigned int new_val_id;
1538 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1539 ref->type,
1540 newoperands.exists ()
1541 ? newoperands : operands,
1542 &newref, VN_WALK);
1543 if (result)
1544 newoperands.release ();
1546 /* We can always insert constants, so if we have a partial
1547 redundant constant load of another type try to translate it
1548 to a constant of appropriate type. */
1549 if (result && is_gimple_min_invariant (result))
1551 tree tem = result;
1552 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1554 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1555 if (tem && !is_gimple_min_invariant (tem))
1556 tem = NULL_TREE;
1558 if (tem)
1559 return get_or_alloc_expr_for_constant (tem);
1562 /* If we'd have to convert things we would need to validate
1563 if we can insert the translated expression. So fail
1564 here for now - we cannot insert an alias with a different
1565 type in the VN tables either, as that would assert. */
1566 if (result
1567 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1568 return NULL;
1569 else if (!result && newref
1570 && !useless_type_conversion_p (ref->type, newref->type))
1572 newoperands.release ();
1573 return NULL;
1576 expr = pre_expr_pool.allocate ();
1577 expr->kind = REFERENCE;
1578 expr->id = 0;
1580 if (newref)
1581 new_val_id = newref->value_id;
1582 else
1584 if (changed || !same_valid)
1586 new_val_id = get_next_value_id ();
1587 value_expressions.safe_grow_cleared
1588 (get_max_value_id () + 1);
1590 else
1591 new_val_id = ref->value_id;
1592 if (!newoperands.exists ())
1593 newoperands = operands.copy ();
1594 newref = vn_reference_insert_pieces (newvuse, ref->set,
1595 ref->type,
1596 newoperands,
1597 result, new_val_id);
1598 newoperands = vNULL;
1600 PRE_EXPR_REFERENCE (expr) = newref;
1601 get_or_alloc_expression_id (expr);
1602 add_to_value (new_val_id, expr);
1604 newoperands.release ();
1605 return expr;
1607 break;
1609 case NAME:
1611 tree name = PRE_EXPR_NAME (expr);
1612 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1613 /* If the SSA name is defined by a PHI node in this block,
1614 translate it. */
1615 if (gimple_code (def_stmt) == GIMPLE_PHI
1616 && gimple_bb (def_stmt) == phiblock)
1618 edge e = find_edge (pred, gimple_bb (def_stmt));
1619 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1621 /* Handle constant. */
1622 if (is_gimple_min_invariant (def))
1623 return get_or_alloc_expr_for_constant (def);
1625 return get_or_alloc_expr_for_name (def);
1627 /* Otherwise return it unchanged - it will get removed if its
1628 value is not available in PREDs AVAIL_OUT set of expressions
1629 by the subtraction of TMP_GEN. */
1630 return expr;
1633 default:
1634 gcc_unreachable ();
1638 /* Wrapper around phi_translate_1 providing caching functionality. */
1640 static pre_expr
1641 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1642 basic_block pred, basic_block phiblock)
1644 expr_pred_trans_t slot = NULL;
1645 pre_expr phitrans;
1647 if (!expr)
1648 return NULL;
1650 /* Constants contain no values that need translation. */
1651 if (expr->kind == CONSTANT)
1652 return expr;
1654 if (value_id_constant_p (get_expr_value_id (expr)))
1655 return expr;
1657 /* Don't add translations of NAMEs as those are cheap to translate. */
1658 if (expr->kind != NAME)
1660 if (phi_trans_add (&slot, expr, pred))
1661 return slot->v;
1662 /* Store NULL for the value we want to return in the case of
1663 recursing. */
1664 slot->v = NULL;
1667 /* Translate. */
1668 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1670 if (slot)
1672 if (phitrans)
1673 slot->v = phitrans;
1674 else
1675 /* Remove failed translations again, they cause insert
1676 iteration to not pick up new opportunities reliably. */
1677 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1680 return phitrans;
1684 /* For each expression in SET, translate the values through phi nodes
1685 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1686 expressions in DEST. */
1688 static void
1689 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1690 basic_block phiblock)
1692 vec<pre_expr> exprs;
1693 pre_expr expr;
1694 int i;
1696 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1698 bitmap_set_copy (dest, set);
1699 return;
1702 exprs = sorted_array_from_bitmap_set (set);
1703 FOR_EACH_VEC_ELT (exprs, i, expr)
1705 pre_expr translated;
1706 translated = phi_translate (expr, set, NULL, pred, phiblock);
1707 if (!translated)
1708 continue;
1710 /* We might end up with multiple expressions from SET being
1711 translated to the same value. In this case we do not want
1712 to retain the NARY or REFERENCE expression but prefer a NAME
1713 which would be the leader. */
1714 if (translated->kind == NAME)
1715 bitmap_value_replace_in_set (dest, translated);
1716 else
1717 bitmap_value_insert_into_set (dest, translated);
1719 exprs.release ();
1722 /* Find the leader for a value (i.e., the name representing that
1723 value) in a given set, and return it. Return NULL if no leader
1724 is found. */
1726 static pre_expr
1727 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1729 if (value_id_constant_p (val))
1731 unsigned int i;
1732 bitmap_iterator bi;
1733 bitmap exprset = value_expressions[val];
1735 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1737 pre_expr expr = expression_for_id (i);
1738 if (expr->kind == CONSTANT)
1739 return expr;
1742 if (bitmap_set_contains_value (set, val))
1744 /* Rather than walk the entire bitmap of expressions, and see
1745 whether any of them has the value we are looking for, we look
1746 at the reverse mapping, which tells us the set of expressions
1747 that have a given value (IE value->expressions with that
1748 value) and see if any of those expressions are in our set.
1749 The number of expressions per value is usually significantly
1750 less than the number of expressions in the set. In fact, for
1751 large testcases, doing it this way is roughly 5-10x faster
1752 than walking the bitmap.
1753 If this is somehow a significant lose for some cases, we can
1754 choose which set to walk based on which set is smaller. */
1755 unsigned int i;
1756 bitmap_iterator bi;
1757 bitmap exprset = value_expressions[val];
1759 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1760 return expression_for_id (i);
1762 return NULL;
1765 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1766 BLOCK by seeing if it is not killed in the block. Note that we are
1767 only determining whether there is a store that kills it. Because
1768 of the order in which clean iterates over values, we are guaranteed
1769 that altered operands will have caused us to be eliminated from the
1770 ANTIC_IN set already. */
1772 static bool
1773 value_dies_in_block_x (pre_expr expr, basic_block block)
1775 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1776 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1777 gimple *def;
1778 gimple_stmt_iterator gsi;
1779 unsigned id = get_expression_id (expr);
1780 bool res = false;
1781 ao_ref ref;
1783 if (!vuse)
1784 return false;
1786 /* Lookup a previously calculated result. */
1787 if (EXPR_DIES (block)
1788 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1789 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1791 /* A memory expression {e, VUSE} dies in the block if there is a
1792 statement that may clobber e. If, starting statement walk from the
1793 top of the basic block, a statement uses VUSE there can be no kill
1794 inbetween that use and the original statement that loaded {e, VUSE},
1795 so we can stop walking. */
1796 ref.base = NULL_TREE;
1797 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1799 tree def_vuse, def_vdef;
1800 def = gsi_stmt (gsi);
1801 def_vuse = gimple_vuse (def);
1802 def_vdef = gimple_vdef (def);
1804 /* Not a memory statement. */
1805 if (!def_vuse)
1806 continue;
1808 /* Not a may-def. */
1809 if (!def_vdef)
1811 /* A load with the same VUSE, we're done. */
1812 if (def_vuse == vuse)
1813 break;
1815 continue;
1818 /* Init ref only if we really need it. */
1819 if (ref.base == NULL_TREE
1820 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1821 refx->operands))
1823 res = true;
1824 break;
1826 /* If the statement may clobber expr, it dies. */
1827 if (stmt_may_clobber_ref_p_1 (def, &ref))
1829 res = true;
1830 break;
1834 /* Remember the result. */
1835 if (!EXPR_DIES (block))
1836 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1837 bitmap_set_bit (EXPR_DIES (block), id * 2);
1838 if (res)
1839 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1841 return res;
1845 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1846 contains its value-id. */
1848 static bool
1849 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1851 if (op && TREE_CODE (op) == SSA_NAME)
1853 unsigned int value_id = VN_INFO (op)->value_id;
1854 if (!(bitmap_set_contains_value (set1, value_id)
1855 || (set2 && bitmap_set_contains_value (set2, value_id))))
1856 return false;
1858 return true;
1861 /* Determine if the expression EXPR is valid in SET1 U SET2.
1862 ONLY SET2 CAN BE NULL.
1863 This means that we have a leader for each part of the expression
1864 (if it consists of values), or the expression is an SSA_NAME.
1865 For loads/calls, we also see if the vuse is killed in this block. */
1867 static bool
1868 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1870 switch (expr->kind)
1872 case NAME:
1873 /* By construction all NAMEs are available. Non-available
1874 NAMEs are removed by subtracting TMP_GEN from the sets. */
1875 return true;
1876 case NARY:
1878 unsigned int i;
1879 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1880 for (i = 0; i < nary->length; i++)
1881 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1882 return false;
1883 return true;
1885 break;
1886 case REFERENCE:
1888 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1889 vn_reference_op_t vro;
1890 unsigned int i;
1892 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1894 if (!op_valid_in_sets (set1, set2, vro->op0)
1895 || !op_valid_in_sets (set1, set2, vro->op1)
1896 || !op_valid_in_sets (set1, set2, vro->op2))
1897 return false;
1899 return true;
1901 default:
1902 gcc_unreachable ();
1906 /* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2.
1907 This means expressions that are made up of values we have no leaders for
1908 in SET1 or SET2. */
1910 static void
1911 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
1913 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1914 pre_expr expr;
1915 int i;
1917 FOR_EACH_VEC_ELT (exprs, i, expr)
1919 if (!valid_in_sets (set1, set2, expr))
1920 bitmap_remove_expr_from_set (set1, expr);
1922 exprs.release ();
1925 /* Clean the set of expressions that are no longer valid in SET because
1926 they are clobbered in BLOCK or because they trap and may not be executed. */
1928 static void
1929 prune_clobbered_mems (bitmap_set_t set, basic_block block)
1931 bitmap_iterator bi;
1932 unsigned i;
1933 pre_expr to_remove = NULL;
1935 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1937 /* Remove queued expr. */
1938 if (to_remove)
1940 bitmap_remove_expr_from_set (set, to_remove);
1941 to_remove = NULL;
1944 pre_expr expr = expression_for_id (i);
1945 if (expr->kind == REFERENCE)
1947 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1948 if (ref->vuse)
1950 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
1951 if (!gimple_nop_p (def_stmt)
1952 && ((gimple_bb (def_stmt) != block
1953 && !dominated_by_p (CDI_DOMINATORS,
1954 block, gimple_bb (def_stmt)))
1955 || (gimple_bb (def_stmt) == block
1956 && value_dies_in_block_x (expr, block))))
1957 to_remove = expr;
1960 else if (expr->kind == NARY)
1962 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1963 /* If the NARY may trap make sure the block does not contain
1964 a possible exit point.
1965 ??? This is overly conservative if we translate AVAIL_OUT
1966 as the available expression might be after the exit point. */
1967 if (BB_MAY_NOTRETURN (block)
1968 && vn_nary_may_trap (nary))
1969 to_remove = expr;
1973 /* Remove queued expr. */
1974 if (to_remove)
1975 bitmap_remove_expr_from_set (set, to_remove);
1978 static sbitmap has_abnormal_preds;
1980 /* Compute the ANTIC set for BLOCK.
1982 If succs(BLOCK) > 1 then
1983 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1984 else if succs(BLOCK) == 1 then
1985 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1987 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1989 Note that clean() is deferred until after the iteration. */
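/* As an illustration (hypothetical GIMPLE, not a particular testcase),
   consider a diamond whose arms both compute a_1 + b_2:

             bb2
            /   \
          bb3   bb4    ANTIC_IN (bb3) = ANTIC_IN (bb4) = { a_1 + b_2 }
            \   /
             bb5

   ANTIC_OUT (bb2) is the intersection of the two ANTIC_IN sets, so
   a_1 + b_2 is anticipated at the end of bb2 and may be inserted there;
   if only one arm computed it, the intersection would be empty and no
   insertion would be legal.  */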
1991 static bool
1992 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1994 bitmap_set_t S, old, ANTIC_OUT;
1995 bitmap_iterator bi;
1996 unsigned int bii;
1997 edge e;
1998 edge_iterator ei;
2000 bool changed = ! BB_VISITED (block);
2001 BB_VISITED (block) = 1;
2002 old = ANTIC_OUT = S = NULL;
2004 /* If any edges from predecessors are abnormal, antic_in is empty,
2005 so do nothing. */
2006 if (block_has_abnormal_pred_edge)
2007 goto maybe_dump_sets;
2009 old = ANTIC_IN (block);
2010 ANTIC_OUT = bitmap_set_new ();
2012 /* If the block has no successors, ANTIC_OUT is empty. */
2013 if (EDGE_COUNT (block->succs) == 0)
2015 /* If we have one successor, we could have some phi nodes to
2016 translate through. */
2017 else if (single_succ_p (block))
2019 basic_block succ_bb = single_succ (block);
2020 gcc_assert (BB_VISITED (succ_bb));
2021 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2023 /* If we have multiple successors, we take the intersection of all of
2024 them. Note that in the case of loop exit phi nodes, we may have
2025 phis to translate through. */
2026 else
2028 size_t i;
2029 basic_block bprime, first = NULL;
2031 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2032 FOR_EACH_EDGE (e, ei, block->succs)
2034 if (!first
2035 && BB_VISITED (e->dest))
2036 first = e->dest;
2037 else if (BB_VISITED (e->dest))
2038 worklist.quick_push (e->dest);
2039 else
2041 /* Unvisited successors get their ANTIC_IN replaced by the
2042 maximal set to arrive at a maximum ANTIC_IN solution.
2043 We can ignore them in the intersection operation and thus
2044 need not explicitly represent that maximum solution. */
2045 if (dump_file && (dump_flags & TDF_DETAILS))
2046 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2047 e->src->index, e->dest->index);
2051 /* With multiple successors we must already have visited at least one,
2052 which is guaranteed by the iteration order. */
2053 gcc_assert (first != NULL);
2055 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2057 /* If we have multiple successors we need to intersect the ANTIC_OUT
2058 sets. For values that's a simple intersection but for
2059 expressions it is a union. Given we want to have a single
2060 expression per value in our sets we have to canonicalize.
2061 Avoid randomness and running into cycles like for PR82129 and
2062 canonicalize the expression we choose to the one with the
2063 lowest id. This requires we actually compute the union first. */
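/* A sketch of why both operations are needed (hypothetical names): if one
   successor contributes a_1 + b_2 and another contributes c_3 + d_4, and
   both were value-numbered to the same value V, intersecting the value
   bitmaps keeps V while unioning the expression bitmaps keeps both
   expressions; the pruning loop below then retains only the expression
   with the lowest ID as the canonical representative of V.  */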
2064 FOR_EACH_VEC_ELT (worklist, i, bprime)
2066 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2068 bitmap_set_t tmp = bitmap_set_new ();
2069 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2070 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2071 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2072 bitmap_set_free (tmp);
2074 else
2076 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (bprime)->values);
2077 bitmap_ior_into (&ANTIC_OUT->expressions,
2078 &ANTIC_IN (bprime)->expressions);
2081 if (! worklist.is_empty ())
2083 /* Prune expressions not in the value set, canonicalizing to
2084 the expression with the lowest ID. */
2085 bitmap_iterator bi;
2086 unsigned int i;
2087 unsigned int to_clear = -1U;
2088 bitmap seen_value = BITMAP_ALLOC (NULL);
2089 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2091 if (to_clear != -1U)
2093 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2094 to_clear = -1U;
2096 pre_expr expr = expression_for_id (i);
2097 unsigned int value_id = get_expr_value_id (expr);
2098 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)
2099 || !bitmap_set_bit (seen_value, value_id))
2100 to_clear = i;
2102 if (to_clear != -1U)
2103 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2104 BITMAP_FREE (seen_value);
2108 /* Prune expressions that are clobbered in block and thus become
2109 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2110 prune_clobbered_mems (ANTIC_OUT, block);
2112 /* Generate ANTIC_OUT - TMP_GEN. */
2113 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2115 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2116 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2117 TMP_GEN (block));
2119 /* Then union in the ANTIC_OUT - TMP_GEN values,
2120 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2121 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2122 bitmap_value_insert_into_set (ANTIC_IN (block),
2123 expression_for_id (bii));
2125 /* clean (ANTIC_IN (block)) is deferred until after the iteration has converged
2126 because it can cause non-convergence, see for example PR81181. */
2128 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2129 changed = true;
2131 maybe_dump_sets:
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2134 if (ANTIC_OUT)
2135 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2137 if (changed)
2138 fprintf (dump_file, "[changed] ");
2139 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2140 block->index);
2142 if (S)
2143 print_bitmap_set (dump_file, S, "S", block->index);
2145 if (old)
2146 bitmap_set_free (old);
2147 if (S)
2148 bitmap_set_free (S);
2149 if (ANTIC_OUT)
2150 bitmap_set_free (ANTIC_OUT);
2151 return changed;
2154 /* Compute PARTIAL_ANTIC for BLOCK.
2156 If succs(BLOCK) > 1 then
2157 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2158 in ANTIC_OUT for all succ(BLOCK)
2159 else if succs(BLOCK) == 1 then
2160 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2162 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
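   As an illustration (hypothetical): if only one of two arms of a diamond
   computes a_1 + b_2, the expression is missing from the arms' ANTIC_IN
   intersection and thus not fully anticipated at the split block, but it
   is in the union and hence partially anticipated; PA_IN/PA_OUT collect
   exactly these candidates for the partial-partial insertion done
   later.  */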
2165 static void
2166 compute_partial_antic_aux (basic_block block,
2167 bool block_has_abnormal_pred_edge)
2169 bitmap_set_t old_PA_IN;
2170 bitmap_set_t PA_OUT;
2171 edge e;
2172 edge_iterator ei;
2173 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2175 old_PA_IN = PA_OUT = NULL;
2177 /* If any edges from predecessors are abnormal, antic_in is empty,
2178 so do nothing. */
2179 if (block_has_abnormal_pred_edge)
2180 goto maybe_dump_sets;
2182 /* If there are too many partially anticipatable values in the
2183 block, phi_translate_set can take an exponential time: stop
2184 before the translation starts. */
2185 if (max_pa
2186 && single_succ_p (block)
2187 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2188 goto maybe_dump_sets;
2190 old_PA_IN = PA_IN (block);
2191 PA_OUT = bitmap_set_new ();
2194 /* If the block has no successors, PA_OUT is empty. */
2194 if (EDGE_COUNT (block->succs) == 0)
2196 /* If we have one successor, we could have some phi nodes to
2197 translate through. Note that we can't phi translate across DFS
2198 back edges in partial antic, because it uses a union operation on
2199 the successors. For recurrences like IV's, we will end up
2200 generating a new value in the set on each go around (i + 3 (VH.1),
2201 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2202 else if (single_succ_p (block))
2204 basic_block succ = single_succ (block);
2205 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2206 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2208 /* If we have multiple successors, we take the union of all of
2209 them. */
2210 else
2212 size_t i;
2213 basic_block bprime;
2215 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2216 FOR_EACH_EDGE (e, ei, block->succs)
2218 if (e->flags & EDGE_DFS_BACK)
2219 continue;
2220 worklist.quick_push (e->dest);
2222 if (worklist.length () > 0)
2224 FOR_EACH_VEC_ELT (worklist, i, bprime)
2226 unsigned int i;
2227 bitmap_iterator bi;
2229 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2230 bitmap_value_insert_into_set (PA_OUT,
2231 expression_for_id (i));
2232 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2234 bitmap_set_t pa_in = bitmap_set_new ();
2235 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2236 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2237 bitmap_value_insert_into_set (PA_OUT,
2238 expression_for_id (i));
2239 bitmap_set_free (pa_in);
2241 else
2242 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2243 bitmap_value_insert_into_set (PA_OUT,
2244 expression_for_id (i));
2249 /* Prune expressions that are clobbered in block and thus become
2250 invalid if translated from PA_OUT to PA_IN. */
2251 prune_clobbered_mems (PA_OUT, block);
2253 /* PA_IN starts with PA_OUT - TMP_GEN.
2254 Then we subtract things from ANTIC_IN. */
2255 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2257 /* For partial antic, we want to put back in the phi results, since
2258 we will properly avoid making them partially antic over backedges. */
2259 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2260 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2262 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2263 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2265 clean (PA_IN (block), ANTIC_IN (block));
2267 maybe_dump_sets:
2268 if (dump_file && (dump_flags & TDF_DETAILS))
2270 if (PA_OUT)
2271 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2273 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2275 if (old_PA_IN)
2276 bitmap_set_free (old_PA_IN);
2277 if (PA_OUT)
2278 bitmap_set_free (PA_OUT);
2281 /* Compute ANTIC and partial ANTIC sets. */
2283 static void
2284 compute_antic (void)
2286 bool changed = true;
2287 int num_iterations = 0;
2288 basic_block block;
2289 int i;
2290 edge_iterator ei;
2291 edge e;
2293 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2294 We pre-build the map of blocks with incoming abnormal edges here. */
2295 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2296 bitmap_clear (has_abnormal_preds);
2298 FOR_ALL_BB_FN (block, cfun)
2300 BB_VISITED (block) = 0;
2302 FOR_EACH_EDGE (e, ei, block->preds)
2303 if (e->flags & EDGE_ABNORMAL)
2305 bitmap_set_bit (has_abnormal_preds, block->index);
2306 break;
2309 /* While we are here, give empty ANTIC_IN sets to each block. */
2310 ANTIC_IN (block) = bitmap_set_new ();
2311 if (do_partial_partial)
2312 PA_IN (block) = bitmap_set_new ();
2315 /* At the exit block we anticipate nothing. */
2316 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2318 /* For ANTIC computation we need a postorder that also guarantees that
2319 a block with a single successor is visited after its successor.
2320 RPO on the inverted CFG has this property. */
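/* For instance, for a chain bb2 -> bb3 -> exit the inverted-CFG RPO
   visits exit, then bb3, then bb2, so every single-successor block finds
   its successor's ANTIC_IN already computed (illustrative; any chain
   behaves the same).  */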
2321 auto_vec<int, 20> postorder;
2322 inverted_post_order_compute (&postorder);
2324 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2325 bitmap_clear (worklist);
2326 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2327 bitmap_set_bit (worklist, e->src->index);
2328 while (changed)
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2331 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2332 /* ??? We need to clear our PHI translation cache here as the
2333 ANTIC sets shrink and we restrict valid translations to
2334 those having operands with leaders in ANTIC. Same below
2335 for PA ANTIC computation. */
2336 num_iterations++;
2337 changed = false;
2338 for (i = postorder.length () - 1; i >= 0; i--)
2340 if (bitmap_bit_p (worklist, postorder[i]))
2342 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2343 bitmap_clear_bit (worklist, block->index);
2344 if (compute_antic_aux (block,
2345 bitmap_bit_p (has_abnormal_preds,
2346 block->index)))
2348 FOR_EACH_EDGE (e, ei, block->preds)
2349 bitmap_set_bit (worklist, e->src->index);
2350 changed = true;
2354 /* Theoretically possible, but *highly* unlikely. */
2355 gcc_checking_assert (num_iterations < 500);
2358 /* We have to clean after the dataflow problem has converged, as cleaning
2359 can cause non-convergence because it is based on expressions
2360 rather than values. */
2361 FOR_EACH_BB_FN (block, cfun)
2362 clean (ANTIC_IN (block));
2364 statistics_histogram_event (cfun, "compute_antic iterations",
2365 num_iterations);
2367 if (do_partial_partial)
2369 /* For partial antic we ignore backedges and thus we do not need
2370 to perform any iteration when we process blocks in postorder. */
2371 int postorder_num
2372 = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
2373 for (i = postorder_num - 1 ; i >= 0; i--)
2375 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2376 compute_partial_antic_aux (block,
2377 bitmap_bit_p (has_abnormal_preds,
2378 block->index));
2382 sbitmap_free (has_abnormal_preds);
2386 /* Inserted expressions are placed onto this worklist, which is used
2387 for performing quick dead code elimination of insertions we made
2388 that didn't turn out to be necessary. */
2389 static bitmap inserted_exprs;
2391 /* The actual worker for create_component_ref_by_pieces. */
2393 static tree
2394 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2395 unsigned int *operand, gimple_seq *stmts)
2397 vn_reference_op_t currop = &ref->operands[*operand];
2398 tree genop;
2399 ++*operand;
2400 switch (currop->opcode)
2402 case CALL_EXPR:
2403 gcc_unreachable ();
2405 case MEM_REF:
2407 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2408 stmts);
2409 if (!baseop)
2410 return NULL_TREE;
2411 tree offset = currop->op0;
2412 if (TREE_CODE (baseop) == ADDR_EXPR
2413 && handled_component_p (TREE_OPERAND (baseop, 0)))
2415 HOST_WIDE_INT off;
2416 tree base;
2417 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2418 &off);
2419 gcc_assert (base);
2420 offset = int_const_binop (PLUS_EXPR, offset,
2421 build_int_cst (TREE_TYPE (offset),
2422 off));
2423 baseop = build_fold_addr_expr (base);
2425 genop = build2 (MEM_REF, currop->type, baseop, offset);
2426 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2427 MR_DEPENDENCE_BASE (genop) = currop->base;
2428 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2429 return genop;
2432 case TARGET_MEM_REF:
2434 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2435 vn_reference_op_t nextop = &ref->operands[++*operand];
2436 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2437 stmts);
2438 if (!baseop)
2439 return NULL_TREE;
2440 if (currop->op0)
2442 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2443 if (!genop0)
2444 return NULL_TREE;
2446 if (nextop->op0)
2448 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2449 if (!genop1)
2450 return NULL_TREE;
2452 genop = build5 (TARGET_MEM_REF, currop->type,
2453 baseop, currop->op2, genop0, currop->op1, genop1);
2455 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2456 MR_DEPENDENCE_BASE (genop) = currop->base;
2457 return genop;
2460 case ADDR_EXPR:
2461 if (currop->op0)
2463 gcc_assert (is_gimple_min_invariant (currop->op0));
2464 return currop->op0;
2466 /* Fallthrough. */
2467 case REALPART_EXPR:
2468 case IMAGPART_EXPR:
2469 case VIEW_CONVERT_EXPR:
2471 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2472 stmts);
2473 if (!genop0)
2474 return NULL_TREE;
2475 return fold_build1 (currop->opcode, currop->type, genop0);
2478 case WITH_SIZE_EXPR:
2480 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2481 stmts);
2482 if (!genop0)
2483 return NULL_TREE;
2484 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2485 if (!genop1)
2486 return NULL_TREE;
2487 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2490 case BIT_FIELD_REF:
2492 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2493 stmts);
2494 if (!genop0)
2495 return NULL_TREE;
2496 tree op1 = currop->op0;
2497 tree op2 = currop->op1;
2498 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2499 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2500 return fold (t);
2503 /* For array ref vn_reference_op's, operand 1 of the array ref
2504 is op0 of the reference op and operand 3 of the array ref is
2505 op1. */
2506 case ARRAY_RANGE_REF:
2507 case ARRAY_REF:
2509 tree genop0;
2510 tree genop1 = currop->op0;
2511 tree genop2 = currop->op1;
2512 tree genop3 = currop->op2;
2513 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2514 stmts);
2515 if (!genop0)
2516 return NULL_TREE;
2517 genop1 = find_or_generate_expression (block, genop1, stmts);
2518 if (!genop1)
2519 return NULL_TREE;
2520 if (genop2)
2522 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2523 /* Drop zero minimum index if redundant. */
2524 if (integer_zerop (genop2)
2525 && (!domain_type
2526 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2527 genop2 = NULL_TREE;
2528 else
2530 genop2 = find_or_generate_expression (block, genop2, stmts);
2531 if (!genop2)
2532 return NULL_TREE;
2535 if (genop3)
2537 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2538 /* We can't always put a size in units of the element alignment
2539 here as the element alignment may not be visible. See
2540 PR43783. Simply drop the element size for constant
2541 sizes. */
2542 if (TREE_CODE (genop3) == INTEGER_CST
2543 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2544 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2545 (wi::to_offset (genop3)
2546 * vn_ref_op_align_unit (currop))))
2547 genop3 = NULL_TREE;
2548 else
2550 genop3 = find_or_generate_expression (block, genop3, stmts);
2551 if (!genop3)
2552 return NULL_TREE;
2555 return build4 (currop->opcode, currop->type, genop0, genop1,
2556 genop2, genop3);
2558 case COMPONENT_REF:
2560 tree op0;
2561 tree op1;
2562 tree genop2 = currop->op1;
2563 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2564 if (!op0)
2565 return NULL_TREE;
2566 /* op1 should be a FIELD_DECL; those are represented by themselves. */
2567 op1 = currop->op0;
2568 if (genop2)
2570 genop2 = find_or_generate_expression (block, genop2, stmts);
2571 if (!genop2)
2572 return NULL_TREE;
2574 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2577 case SSA_NAME:
2579 genop = find_or_generate_expression (block, currop->op0, stmts);
2580 return genop;
2582 case STRING_CST:
2583 case INTEGER_CST:
2584 case COMPLEX_CST:
2585 case VECTOR_CST:
2586 case REAL_CST:
2587 case CONSTRUCTOR:
2588 case VAR_DECL:
2589 case PARM_DECL:
2590 case CONST_DECL:
2591 case RESULT_DECL:
2592 case FUNCTION_DECL:
2593 return currop->op0;
2595 default:
2596 gcc_unreachable ();
2600 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2601 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2602 trying to rename aggregates into SSA form directly, which is a no-no.
2604 Thus, this routine doesn't create temporaries, it just builds a
2605 single access expression for the array, calling
2606 find_or_generate_expression to build the innermost pieces.
2608 This function is a subroutine of create_expression_by_pieces, and
2609 should not be called on its own unless you really know what you
2610 are doing. */
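/* As a hypothetical illustration, for a reference whose operand vector
   encodes x.a[i_1] this produces the single tree

     ARRAY_REF <COMPONENT_REF <x, a>, i_1'>

   where i_1' is whatever leader find_or_generate_expression yields for
   i_1; no temporary is created for the intermediate COMPONENT_REF
   itself.  */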
2612 static tree
2613 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2614 gimple_seq *stmts)
2616 unsigned int op = 0;
2617 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2620 /* Find a simple leader for an expression, or generate one using
2621 create_expression_by_pieces from a NARY expression for the value.
2622 BLOCK is the basic_block we are looking for leaders in.
2623 OP is the tree expression to find a leader for or generate.
2624 Returns the leader or NULL_TREE on failure. */
2626 static tree
2627 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2629 pre_expr expr = get_or_alloc_expr_for (op);
2630 unsigned int lookfor = get_expr_value_id (expr);
2631 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2632 if (leader)
2634 if (leader->kind == NAME)
2635 return PRE_EXPR_NAME (leader);
2636 else if (leader->kind == CONSTANT)
2637 return PRE_EXPR_CONSTANT (leader);
2639 /* Defer. */
2640 return NULL_TREE;
2643 /* It must be a complex expression, so generate it recursively. Note
2644 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2645 where the insert algorithm fails to insert a required expression. */
2646 bitmap exprset = value_expressions[lookfor];
2647 bitmap_iterator bi;
2648 unsigned int i;
2649 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2651 pre_expr temp = expression_for_id (i);
2652 /* We cannot insert random REFERENCE expressions at arbitrary
2653 places. We can insert NARYs, which eventually re-materialize
2654 their operand values. */
2655 if (temp->kind == NARY)
2656 return create_expression_by_pieces (block, temp, stmts,
2657 get_expr_type (expr));
2660 /* Defer. */
2661 return NULL_TREE;
2664 /* Create an expression in pieces, so that we can handle very complex
2665 expressions that may be ANTIC, but not necessarily GIMPLE.
2666 BLOCK is the basic block the expression will be inserted into,
2667 EXPR is the expression to insert (in value form)
2668 STMTS is a statement list to append the necessary insertions into.
2670 This function will die if we hit some value that shouldn't be
2671 ANTIC but is (i.e. there is no leader for it or its components).
2672 The function returns NULL_TREE in case a different antic expression
2673 has to be inserted first.
2674 This function may also generate expressions that are themselves
2675 partially or fully redundant. Those that are will be either made
2676 fully redundant during the next iteration of insert (for partially
2677 redundant ones), or eliminated by eliminate (for fully redundant
2678 ones). */
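/* For instance (hypothetical names), materializing the value of
   a_1 + b_2 where b_2's value must itself be rebuilt from c_3 * d_4
   appends

     pretmp_10 = c_3 * d_4;
     pretmp_11 = a_1 + pretmp_10;

   to STMTS and returns pretmp_11, with both temporaries value-numbered
   and entered into NEW_SETS and AVAIL_OUT.  */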
2680 static tree
2681 create_expression_by_pieces (basic_block block, pre_expr expr,
2682 gimple_seq *stmts, tree type)
2684 tree name;
2685 tree folded;
2686 gimple_seq forced_stmts = NULL;
2687 unsigned int value_id;
2688 gimple_stmt_iterator gsi;
2689 tree exprtype = type ? type : get_expr_type (expr);
2690 pre_expr nameexpr;
2691 gassign *newstmt;
2693 switch (expr->kind)
2695 /* We may hit the NAME/CONSTANT case if we have to convert types
2696 that value numbering saw through. */
2697 case NAME:
2698 folded = PRE_EXPR_NAME (expr);
2699 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2700 return folded;
2701 break;
2702 case CONSTANT:
2704 folded = PRE_EXPR_CONSTANT (expr);
2705 tree tem = fold_convert (exprtype, folded);
2706 if (is_gimple_min_invariant (tem))
2707 return tem;
2708 break;
2710 case REFERENCE:
2711 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2713 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2714 unsigned int operand = 1;
2715 vn_reference_op_t currop = &ref->operands[0];
2716 tree sc = NULL_TREE;
2717 tree fn;
2718 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2719 fn = currop->op0;
2720 else
2721 fn = find_or_generate_expression (block, currop->op0, stmts);
2722 if (!fn)
2723 return NULL_TREE;
2724 if (currop->op1)
2726 sc = find_or_generate_expression (block, currop->op1, stmts);
2727 if (!sc)
2728 return NULL_TREE;
2730 auto_vec<tree> args (ref->operands.length () - 1);
2731 while (operand < ref->operands.length ())
2733 tree arg = create_component_ref_by_pieces_1 (block, ref,
2734 &operand, stmts);
2735 if (!arg)
2736 return NULL_TREE;
2737 args.quick_push (arg);
2739 gcall *call
2740 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2741 ? build_fold_addr_expr (fn) : fn), args);
2742 gimple_call_set_with_bounds (call, currop->with_bounds);
2743 if (sc)
2744 gimple_call_set_chain (call, sc);
2745 tree forcedname = make_ssa_name (currop->type);
2746 gimple_call_set_lhs (call, forcedname);
2747 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2748 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2749 folded = forcedname;
2751 else
2753 folded = create_component_ref_by_pieces (block,
2754 PRE_EXPR_REFERENCE (expr),
2755 stmts);
2756 if (!folded)
2757 return NULL_TREE;
2758 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2759 newstmt = gimple_build_assign (name, folded);
2760 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2761 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2762 folded = name;
2764 break;
2765 case NARY:
2767 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2768 tree *genop = XALLOCAVEC (tree, nary->length);
2769 unsigned i;
2770 for (i = 0; i < nary->length; ++i)
2772 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2773 if (!genop[i])
2774 return NULL_TREE;
2775 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2776 may have conversions stripped. */
2777 if (nary->opcode == POINTER_PLUS_EXPR)
2779 if (i == 0)
2780 genop[i] = gimple_convert (&forced_stmts,
2781 nary->type, genop[i]);
2782 else if (i == 1)
2783 genop[i] = gimple_convert (&forced_stmts,
2784 sizetype, genop[i]);
2786 else
2787 genop[i] = gimple_convert (&forced_stmts,
2788 TREE_TYPE (nary->op[i]), genop[i]);
2790 if (nary->opcode == CONSTRUCTOR)
2792 vec<constructor_elt, va_gc> *elts = NULL;
2793 for (i = 0; i < nary->length; ++i)
2794 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2795 folded = build_constructor (nary->type, elts);
2796 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2797 newstmt = gimple_build_assign (name, folded);
2798 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2799 folded = name;
2801 else
2803 switch (nary->length)
2805 case 1:
2806 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2807 genop[0]);
2808 break;
2809 case 2:
2810 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2811 genop[0], genop[1]);
2812 break;
2813 case 3:
2814 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2815 genop[0], genop[1], genop[2]);
2816 break;
2817 default:
2818 gcc_unreachable ();
2822 break;
2823 default:
2824 gcc_unreachable ();
2827 folded = gimple_convert (&forced_stmts, exprtype, folded);
2829 /* If there is nothing to insert, return the simplified result. */
2830 if (gimple_seq_empty_p (forced_stmts))
2831 return folded;
2832 /* If we simplified to a constant, return it and discard any stmts
2833 we may have built. */
2834 if (is_gimple_min_invariant (folded))
2836 gimple_seq_discard (forced_stmts);
2837 return folded;
2839 /* Likewise if we simplified to something not queued for insertion. */
2840 bool found = false;
2841 gsi = gsi_last (forced_stmts);
2842 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2844 gimple *stmt = gsi_stmt (gsi);
2845 tree forcedname = gimple_get_lhs (stmt);
2846 if (forcedname == folded)
2848 found = true;
2849 break;
2852 if (! found)
2854 gimple_seq_discard (forced_stmts);
2855 return folded;
2857 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2859 /* If we built any intermediate expressions, add them to the value sets
2860 and chain them into the instruction stream. */
2861 if (forced_stmts)
2863 gsi = gsi_start (forced_stmts);
2864 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2866 gimple *stmt = gsi_stmt (gsi);
2867 tree forcedname = gimple_get_lhs (stmt);
2868 pre_expr nameexpr;
2870 if (forcedname != folded)
2872 VN_INFO_GET (forcedname)->valnum = forcedname;
2873 VN_INFO (forcedname)->value_id = get_next_value_id ();
2874 nameexpr = get_or_alloc_expr_for_name (forcedname);
2875 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2876 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2877 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2880 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2882 gimple_seq_add_seq (stmts, forced_stmts);
2885 name = folded;
2887 /* Fold the last statement. */
2888 gsi = gsi_last (*stmts);
2889 if (fold_stmt_inplace (&gsi))
2890 update_stmt (gsi_stmt (gsi));
2892 /* Add a value number to the temporary.
2893 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2894 we are creating the expression by pieces, and this particular piece of
2895 the expression may have been represented. There is no harm in replacing
2896 here. */
2897 value_id = get_expr_value_id (expr);
2898 VN_INFO_GET (name)->value_id = value_id;
2899 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2900 if (VN_INFO (name)->valnum == NULL_TREE)
2901 VN_INFO (name)->valnum = name;
2902 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2903 nameexpr = get_or_alloc_expr_for_name (name);
2904 add_to_value (value_id, nameexpr);
2905 if (NEW_SETS (block))
2906 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2907 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2909 pre_stats.insertions++;
2910 if (dump_file && (dump_flags & TDF_DETAILS))
2912 fprintf (dump_file, "Inserted ");
2913 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
2914 fprintf (dump_file, " in predecessor %d (%04d)\n",
2915 block->index, value_id);
2918 return name;
2922 /* Insert the to-be-made-available values of expression EXPRNUM for each
2923 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2924 merge the result with a phi node that is given the same value number
2925 as the expression. Return true if we have inserted new stuff. */
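/* The classic picture (hypothetical GIMPLE): when a_1 + b_2 is available
   on one incoming edge but not on the other, an expression is inserted
   on the edge that lacks it and the two values are merged:

     bb3: t_5 = a_1 + b_2;    bb4: pretmp_9 = a_1 + b_2;   <- inserted
                  \              /
           bb5: prephitmp_10 = PHI <t_5(3), pretmp_9(4)>

   after which the original computation dominated by bb5 is fully
   redundant and can be eliminated.  */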
2927 static bool
2928 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2929 vec<pre_expr> avail)
2931 pre_expr expr = expression_for_id (exprnum);
2932 pre_expr newphi;
2933 unsigned int val = get_expr_value_id (expr);
2934 edge pred;
2935 bool insertions = false;
2936 bool nophi = false;
2937 basic_block bprime;
2938 pre_expr eprime;
2939 edge_iterator ei;
2940 tree type = get_expr_type (expr);
2941 tree temp;
2942 gphi *phi;
2944 /* Make sure we aren't creating an induction variable. */
2945 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2947 bool firstinsideloop = false;
2948 bool secondinsideloop = false;
2949 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2950 EDGE_PRED (block, 0)->src);
2951 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2952 EDGE_PRED (block, 1)->src);
2953 /* Induction variables only have one edge inside the loop. */
2954 if ((firstinsideloop ^ secondinsideloop)
2955 && expr->kind != REFERENCE)
2957 if (dump_file && (dump_flags & TDF_DETAILS))
2958 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2959 nophi = true;
2963 /* Make the necessary insertions. */
2964 FOR_EACH_EDGE (pred, ei, block->preds)
2966 gimple_seq stmts = NULL;
2967 tree builtexpr;
2968 bprime = pred->src;
2969 eprime = avail[pred->dest_idx];
2970 builtexpr = create_expression_by_pieces (bprime, eprime,
2971 &stmts, type);
2972 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
2973 if (!gimple_seq_empty_p (stmts))
2975 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
2976 gcc_assert (! new_bb);
2977 insertions = true;
2979 if (!builtexpr)
2981 /* We cannot insert a PHI node if we failed to insert
2982 on one edge. */
2983 nophi = true;
2984 continue;
2986 if (is_gimple_min_invariant (builtexpr))
2987 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
2988 else
2989 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
2991 /* If we didn't want a phi node, and we made insertions, we still have
2992 inserted new stuff, and thus return true. If we didn't want a phi node,
2993 and didn't make insertions, we haven't added anything new, so return
2994 false. */
2995 if (nophi && insertions)
2996 return true;
2997 else if (nophi && !insertions)
2998 return false;
3000 /* Now build a phi for the new variable. */
3001 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3002 phi = create_phi_node (temp, block);
3004 VN_INFO_GET (temp)->value_id = val;
3005 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3006 if (VN_INFO (temp)->valnum == NULL_TREE)
3007 VN_INFO (temp)->valnum = temp;
3008 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3009 FOR_EACH_EDGE (pred, ei, block->preds)
3011 pre_expr ae = avail[pred->dest_idx];
3012 gcc_assert (get_expr_type (ae) == type
3013 || useless_type_conversion_p (type, get_expr_type (ae)));
3014 if (ae->kind == CONSTANT)
3015 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3016 pred, UNKNOWN_LOCATION);
3017 else
3018 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3021 newphi = get_or_alloc_expr_for_name (temp);
3022 add_to_value (val, newphi);
3024 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3025 this insertion, since we test for the existence of this value in PHI_GEN
3026 before proceeding with the partial redundancy checks in insert_aux.
3028 The value may exist in AVAIL_OUT, in particular, it could be represented
3029 by the expression we are trying to eliminate, in which case we want the
3030 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3031 inserted there.
3033 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3034 this block, because if it did, it would have existed in our dominator's
3035 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3038 bitmap_insert_into_set (PHI_GEN (block), newphi);
3039 bitmap_value_replace_in_set (AVAIL_OUT (block),
3040 newphi);
3041 bitmap_insert_into_set (NEW_SETS (block),
3042 newphi);
3044 /* If we insert a PHI node for a conversion of another PHI node
3045 in the same basic-block try to preserve range information.
3046 This is important so that followup loop passes receive optimal
3047 number of iteration analysis results. See PR61743. */
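/* Hypothetical illustration: if the new phi converts i_1, an int phi
   result in this block known to lie in [0, 99], to a wider type, the
   range [0, 99] is copied over to the new phi result so that e.g. niter
   analysis of a following loop is not pessimized.  */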
3048 if (expr->kind == NARY
3049 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3050 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3051 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3052 && INTEGRAL_TYPE_P (type)
3053 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3054 && (TYPE_PRECISION (type)
3055 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3056 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3058 wide_int min, max;
3059 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3060 && !wi::neg_p (min, SIGNED)
3061 && !wi::neg_p (max, SIGNED))
3062 /* Just handle extension and sign-changes of all-positive ranges. */
3063 set_range_info (temp,
3064 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3065 wide_int_storage::from (min, TYPE_PRECISION (type),
3066 TYPE_SIGN (type)),
3067 wide_int_storage::from (max, TYPE_PRECISION (type),
3068 TYPE_SIGN (type)));
3071 if (dump_file && (dump_flags & TDF_DETAILS))
3073 fprintf (dump_file, "Created phi ");
3074 print_gimple_stmt (dump_file, phi, 0);
3075 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3077 pre_stats.phis++;
3078 return true;
3083 /* Perform insertion of partially redundant or hoistable values.
3084 For BLOCK, do the following:
3085 1. Propagate the NEW_SETS of the dominator into the current block.
3086 If the block has multiple predecessors,
3087 2a. Iterate over the ANTIC expressions for the block to see if
3088 any of them are partially redundant.
3089 2b. If so, insert them into the necessary predecessors to make
3090 the expression fully redundant.
3091 2c. Insert a new PHI merging the values of the predecessors.
3092 2d. Insert the new PHI, and the new expressions, into the
3093 NEW_SETS set.
3094 If the block has multiple successors,
3095 3a. Iterate over the ANTIC values for the block to see if
3096 any of them are good candidates for hoisting.
3097 3b. If so, insert expressions computing the values in BLOCK,
3098 and add the new expressions into the NEW_SETS set.
3099 4. Recursively call ourselves on the dominator children of BLOCK.
3101 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3102 do_pre_regular_insertion and do_partial_insertion. 3a and 3b are
3103 done in do_hoist_insertion.
3106 static bool
3107 do_pre_regular_insertion (basic_block block, basic_block dom)
3109 bool new_stuff = false;
3110 vec<pre_expr> exprs;
3111 pre_expr expr;
3112 auto_vec<pre_expr> avail;
3113 int i;
3115 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3116 avail.safe_grow (EDGE_COUNT (block->preds));
3118 FOR_EACH_VEC_ELT (exprs, i, expr)
3120 if (expr->kind == NARY
3121 || expr->kind == REFERENCE)
3123 unsigned int val;
3124 bool by_some = false;
3125 bool cant_insert = false;
3126 bool all_same = true;
3127 pre_expr first_s = NULL;
3128 edge pred;
3129 basic_block bprime;
3130 pre_expr eprime = NULL;
3131 edge_iterator ei;
3132 pre_expr edoubleprime = NULL;
3133 bool do_insertion = false;
3135 val = get_expr_value_id (expr);
3136 if (bitmap_set_contains_value (PHI_GEN (block), val))
3137 continue;
3138 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3140 if (dump_file && (dump_flags & TDF_DETAILS))
3142 fprintf (dump_file, "Found fully redundant value: ");
3143 print_pre_expr (dump_file, expr);
3144 fprintf (dump_file, "\n");
3146 continue;
3149 FOR_EACH_EDGE (pred, ei, block->preds)
3151 unsigned int vprime;
3153 /* We should never run insertion for the exit block
3154 and so not come across fake pred edges. */
3155 gcc_assert (!(pred->flags & EDGE_FAKE));
3156 bprime = pred->src;
3157 /* We are looking at ANTIC_OUT of bprime. */
3158 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3159 bprime, block);
3161 /* eprime will generally only be NULL if the
3162 value of the expression, translated
3163 through the PHI for this predecessor, is
3164 undefined. If that is the case, we can't
3165 make the expression fully redundant,
3166 because its value is undefined along a
3167 predecessor path. We can thus break out
3168 early because it doesn't matter what the
3169 rest of the results are. */
3170 if (eprime == NULL)
3172 avail[pred->dest_idx] = NULL;
3173 cant_insert = true;
3174 break;
3177 vprime = get_expr_value_id (eprime);
3178 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3179 vprime);
3180 if (edoubleprime == NULL)
3182 avail[pred->dest_idx] = eprime;
3183 all_same = false;
3185 else
3187 avail[pred->dest_idx] = edoubleprime;
3188 by_some = true;
3189 /* We want to perform insertions to remove a redundancy on
3190 a path in the CFG we want to optimize for speed. */
3191 if (optimize_edge_for_speed_p (pred))
3192 do_insertion = true;
3193 if (first_s == NULL)
3194 first_s = edoubleprime;
3195 else if (!pre_expr_d::equal (first_s, edoubleprime))
3196 all_same = false;
3199 /* If we can insert it, it's not the same value
3200 already existing along every predecessor, and
3201 it's defined by some predecessor, it is
3202 partially redundant. */
3203 if (!cant_insert && !all_same && by_some)
3205 if (!do_insertion)
3207 if (dump_file && (dump_flags & TDF_DETAILS))
3209 fprintf (dump_file, "Skipping partial redundancy for "
3210 "expression ");
3211 print_pre_expr (dump_file, expr);
3212 fprintf (dump_file, " (%04d), no redundancy on to be "
3213 "optimized for speed edge\n", val);
3216 else if (dbg_cnt (treepre_insert))
3218 if (dump_file && (dump_flags & TDF_DETAILS))
3220 fprintf (dump_file, "Found partial redundancy for "
3221 "expression ");
3222 print_pre_expr (dump_file, expr);
3223 fprintf (dump_file, " (%04d)\n",
3224 get_expr_value_id (expr));
3226 if (insert_into_preds_of_block (block,
3227 get_expression_id (expr),
3228 avail))
3229 new_stuff = true;
3232 /* If all edges produce the same value and that value is
3233 an invariant, then the PHI has the same value on all
3234 edges. Note this. */
3235 else if (!cant_insert && all_same)
3237 gcc_assert (edoubleprime->kind == CONSTANT
3238 || edoubleprime->kind == NAME);
3240 tree temp = make_temp_ssa_name (get_expr_type (expr),
3241 NULL, "pretmp");
3242 gassign *assign
3243 = gimple_build_assign (temp,
3244 edoubleprime->kind == CONSTANT ?
3245 PRE_EXPR_CONSTANT (edoubleprime) :
3246 PRE_EXPR_NAME (edoubleprime));
3247 gimple_stmt_iterator gsi = gsi_after_labels (block);
3248 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3250 VN_INFO_GET (temp)->value_id = val;
3251 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3252 if (VN_INFO (temp)->valnum == NULL_TREE)
3253 VN_INFO (temp)->valnum = temp;
3254 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3255 pre_expr newe = get_or_alloc_expr_for_name (temp);
3256 add_to_value (val, newe);
3257 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3258 bitmap_insert_into_set (NEW_SETS (block), newe);
3263 exprs.release ();
3264 return new_stuff;
3268 /* Perform insertion for partially anticipatable expressions. There
3269 is only one case we will perform insertion for these. This case is
3270 if the expression is partially anticipatable, and fully available.
3271 In this case, we know that putting it earlier will enable us to
3272 remove the later computation. */
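/* A hypothetical sketch of the single case handled here:

     bb2: t_3 = a_1 + b_2;    bb3: t_4 = a_1 + b_2;
                  \              /
                   bb4: if (q_5) ...
                   bb5: u_6 = a_1 + b_2;   <- on one path only

   At bb4 the value is fully available (both predecessors compute it) but
   only partially anticipated (used on just one path out), so regular PRE
   will not insert; merging the predecessors' values with a phi in bb4
   still lets the conditional recomputation in bb5 be removed.  */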
3274 static bool
3275 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3277 bool new_stuff = false;
3278 vec<pre_expr> exprs;
3279 pre_expr expr;
3280 auto_vec<pre_expr> avail;
3281 int i;
3283 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3284 avail.safe_grow (EDGE_COUNT (block->preds));
3286 FOR_EACH_VEC_ELT (exprs, i, expr)
3288 if (expr->kind == NARY
3289 || expr->kind == REFERENCE)
3291 unsigned int val;
3292 bool by_all = true;
3293 bool cant_insert = false;
3294 edge pred;
3295 basic_block bprime;
3296 pre_expr eprime = NULL;
3297 edge_iterator ei;
3299 val = get_expr_value_id (expr);
3300 if (bitmap_set_contains_value (PHI_GEN (block), val))
3301 continue;
3302 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3303 continue;
3305 FOR_EACH_EDGE (pred, ei, block->preds)
3307 unsigned int vprime;
3308 pre_expr edoubleprime;
3310 /* We should never run insertion for the exit block
3311 and so not come across fake pred edges. */
3312 gcc_assert (!(pred->flags & EDGE_FAKE));
3313 bprime = pred->src;
3314 eprime = phi_translate (expr, ANTIC_IN (block),
3315 PA_IN (block),
3316 bprime, block);
3318 /* eprime will generally only be NULL if the
3319 value of the expression, translated
3320 through the PHI for this predecessor, is
3321 undefined. If that is the case, we can't
3322 make the expression fully redundant,
3323 because its value is undefined along a
3324 predecessor path. We can thus break out
3325 early because it doesn't matter what the
3326 rest of the results are. */
3327 if (eprime == NULL)
3329 avail[pred->dest_idx] = NULL;
3330 cant_insert = true;
3331 break;
3334 vprime = get_expr_value_id (eprime);
3335 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3336 avail[pred->dest_idx] = edoubleprime;
3337 if (edoubleprime == NULL)
3339 by_all = false;
3340 break;
3344 /* If we can insert it and its value is already
3345 available along every predecessor, the expression
3346 is only partially anticipated here; a phi merging
3347 the predecessors' values removes the later computation. */
3348 if (!cant_insert && by_all)
3350 edge succ;
3351 bool do_insertion = false;
3353 /* Insert only if we can remove a later expression on a path
3354 that we want to optimize for speed.
3355 The phi node that we will be inserting in BLOCK is not free,
3356 and inserting it for the sake of a !optimize_for_speed successor
3357 may cause regressions on the speed path. */
3358 FOR_EACH_EDGE (succ, ei, block->succs)
3360 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3361 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3363 if (optimize_edge_for_speed_p (succ))
3364 do_insertion = true;
3368 if (!do_insertion)
3370 if (dump_file && (dump_flags & TDF_DETAILS))
3372 fprintf (dump_file, "Skipping partial partial redundancy "
3373 "for expression ");
3374 print_pre_expr (dump_file, expr);
3375 fprintf (dump_file, " (%04d), not (partially) anticipated "
3376 "on any to be optimized for speed edges\n", val);
3379 else if (dbg_cnt (treepre_insert))
3381 pre_stats.pa_insert++;
3382 if (dump_file && (dump_flags & TDF_DETAILS))
3384 fprintf (dump_file, "Found partial partial redundancy "
3385 "for expression ");
3386 print_pre_expr (dump_file, expr);
3387 fprintf (dump_file, " (%04d)\n",
3388 get_expr_value_id (expr));
3390 if (insert_into_preds_of_block (block,
3391 get_expression_id (expr),
3392 avail))
3393 new_stuff = true;
3399 exprs.release ();
3400 return new_stuff;
3403 /* Insert expressions in BLOCK to compute hoistable values up.
3404 Return TRUE if something was inserted, otherwise return FALSE.
3405 The caller has to make sure that BLOCK has at least two successors. */
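/* A hypothetical sketch: if both successors of BLOCK compute a_1 + b_2,

     bb2: if (cond_5) ...
     bb3: x_6 = a_1 + b_2;    bb4: y_7 = a_1 + b_2;

   then a_1 + b_2 is in ANTIC_IN (bb2) and in AVAIL_OUT of both
   successors, so a single computation is inserted at the end of bb2 and
   the two below become fully redundant (removed later by
   elimination).  */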
3407 static bool
3408 do_hoist_insertion (basic_block block)
3410 edge e;
3411 edge_iterator ei;
3412 bool new_stuff = false;
3413 unsigned i;
3414 gimple_stmt_iterator last;
3416 /* At least two successors, or else... */
3417 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3419 /* Check that all successors of BLOCK are dominated by block.
3420 We could use dominated_by_p() for this, but actually there is a much
3421 quicker check: any successor that is dominated by BLOCK can't have
3422 more than one predecessor edge. */
3423 FOR_EACH_EDGE (e, ei, block->succs)
3424 if (! single_pred_p (e->dest))
3425 return false;
3427 /* Determine the insertion point. If we cannot safely insert before
3428 the last stmt if we'd have to, bail out. */
3429 last = gsi_last_bb (block);
3430 if (!gsi_end_p (last)
3431 && !is_ctrl_stmt (gsi_stmt (last))
3432 && stmt_ends_bb_p (gsi_stmt (last)))
3433 return false;
3435 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3436 hoistable values. */
3437 bitmap_set hoistable_set;
3439 /* A hoistable value must be in ANTIC_IN(block)
3440 but not in AVAIL_OUT(BLOCK). */
3441 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3442 bitmap_and_compl (&hoistable_set.values,
3443 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3445 /* Short-cut for a common case: hoistable_set is empty. */
3446 if (bitmap_empty_p (&hoistable_set.values))
3447 return false;
3449 /* Compute which of the hoistable values is in AVAIL_OUT of
3450 at least one of the successors of BLOCK. */
3451 bitmap_head availout_in_some;
3452 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3453 FOR_EACH_EDGE (e, ei, block->succs)
3454 /* Do not consider expressions solely because of their availability
3455 on loop exits. They'd be ANTIC-IN throughout the whole loop
3456 and thus effectively hoisted across loops by combination of
3457 PRE and hoisting. */
3458 if (! loop_exit_edge_p (block->loop_father, e))
3459 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3460 &AVAIL_OUT (e->dest)->values);
3461 bitmap_clear (&hoistable_set.values);
3463 /* Short-cut for a common case: availout_in_some is empty. */
3464 if (bitmap_empty_p (&availout_in_some))
3465 return false;
3467 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3468 hoistable_set.values = availout_in_some;
3469 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3471 /* Now finally construct the topological-ordered expression set. */
3472 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3474 bitmap_clear (&hoistable_set.values);
3476 /* If there are candidate values for hoisting, insert expressions
3477 strategically to make the hoistable expressions fully redundant. */
3478 pre_expr expr;
3479 FOR_EACH_VEC_ELT (exprs, i, expr)
3481 /* While we try to sort expressions topologically above, the
3482 sorting doesn't work out perfectly. Catch expressions we
3483 already inserted. */
3484 unsigned int value_id = get_expr_value_id (expr);
3485 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3487 if (dump_file && (dump_flags & TDF_DETAILS))
3489 fprintf (dump_file,
3490 "Already inserted expression for ");
3491 print_pre_expr (dump_file, expr);
3492 fprintf (dump_file, " (%04d)\n", value_id);
3494 continue;
3497 /* OK, we should hoist this value. Perform the transformation. */
3498 pre_stats.hoist_insert++;
3499 if (dump_file && (dump_flags & TDF_DETAILS))
3501 fprintf (dump_file,
3502 "Inserting expression in block %d for code hoisting: ",
3503 block->index);
3504 print_pre_expr (dump_file, expr);
3505 fprintf (dump_file, " (%04d)\n", value_id);
3508 gimple_seq stmts = NULL;
3509 tree res = create_expression_by_pieces (block, expr, &stmts,
3510 get_expr_type (expr));
3512 /* Do not return true if expression creation ultimately
3513 did not insert any statements. */
3514 if (gimple_seq_empty_p (stmts))
3515 res = NULL_TREE;
3516 else
3518 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3519 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3520 else
3521 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3524 /* Make sure to not return true if expression creation ultimately
3525 failed but also make sure to insert any stmts produced as they
3526 are tracked in inserted_exprs. */
3527 if (! res)
3528 continue;
3530 new_stuff = true;
3533 exprs.release ();
3535 return new_stuff;
3538 /* Do a dominator walk on the control flow graph, and insert computations
3539 of values as necessary for PRE and hoisting. */
3541 static bool
3542 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3544 basic_block son;
3545 bool new_stuff = false;
3547 if (block)
3549 basic_block dom;
3550 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3551 if (dom)
3553 unsigned i;
3554 bitmap_iterator bi;
3555 bitmap_set_t newset;
3557 /* First, update the AVAIL_OUT set with anything we may have
3558 inserted higher up in the dominator tree. */
3559 newset = NEW_SETS (dom);
3560 if (newset)
3562 /* Note that we need to value_replace both NEW_SETS and
3563 AVAIL_OUT. In both sets the value may already be
3564 represented by some non-simple expression that we
3565 want to replace here. */
3566 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3568 pre_expr expr = expression_for_id (i);
3569 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3570 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3574 /* Insert expressions for partial redundancies. */
3575 if (do_pre && !single_pred_p (block))
3577 new_stuff |= do_pre_regular_insertion (block, dom);
3578 if (do_partial_partial)
3579 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3582 /* Insert expressions for hoisting. */
3583 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3584 new_stuff |= do_hoist_insertion (block);
3587 for (son = first_dom_son (CDI_DOMINATORS, block);
3588 son;
3589 son = next_dom_son (CDI_DOMINATORS, son))
3591 new_stuff |= insert_aux (son, do_pre, do_hoist);
3594 return new_stuff;
3597 /* Perform insertion of partially redundant and hoistable values. */
3599 static void
3600 insert (void)
3602 bool new_stuff = true;
3603 basic_block bb;
3604 int num_iterations = 0;
3606 FOR_ALL_BB_FN (bb, cfun)
3607 NEW_SETS (bb) = bitmap_set_new ();
3609 while (new_stuff)
3611 num_iterations++;
3612 if (dump_file && (dump_flags & TDF_DETAILS))
3613 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3614 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3615 flag_code_hoisting);
3617 /* Clear the NEW sets before the next iteration. We have already
3618 fully propagated its contents. */
3619 if (new_stuff)
3620 FOR_ALL_BB_FN (bb, cfun)
3621 bitmap_set_free (NEW_SETS (bb));
3623 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3627 /* Compute the AVAIL set for all basic blocks.
3629 This function performs value numbering of the statements in each basic
3630 block. The AVAIL sets are built from information we glean while doing
3631 this value numbering, since the AVAIL sets contain only one entry per
3632 value.
3634 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3635 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
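/* Illustration: because AVAIL_IN is simply the dominator's AVAIL_OUT, a
   value computed as t_3 = a_1 + b_2 in some block is available in every
   block that block dominates, without any iterative dataflow
   (hypothetical names).  */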
3637 static void
3638 compute_avail (void)
3641 basic_block block, son;
3642 basic_block *worklist;
3643 size_t sp = 0;
3644 unsigned i;
3645 tree name;
3647 /* We pretend that default definitions are defined in the entry block.
3648 This includes function arguments and the static chain decl. */
3649 FOR_EACH_SSA_NAME (i, name, cfun)
3651 pre_expr e;
3652 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3653 || has_zero_uses (name)
3654 || virtual_operand_p (name))
3655 continue;
3657 e = get_or_alloc_expr_for_name (name);
3658 add_to_value (get_expr_value_id (e), e);
3659 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3660 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3664 if (dump_file && (dump_flags & TDF_DETAILS))
3666 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3667 "tmp_gen", ENTRY_BLOCK);
3668 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3669 "avail_out", ENTRY_BLOCK);
3672 /* Allocate the worklist. */
3673 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3675 /* Seed the algorithm by putting the dominator children of the entry
3676 block on the worklist. */
3677 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3678 son;
3679 son = next_dom_son (CDI_DOMINATORS, son))
3680 worklist[sp++] = son;
3682 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3683 = ssa_default_def (cfun, gimple_vop (cfun));
3685 /* Loop until the worklist is empty. */
3686 while (sp)
3688 gimple *stmt;
3689 basic_block dom;
3691 /* Pick a block from the worklist. */
3692 block = worklist[--sp];
3694 /* Initially, the set of available values in BLOCK is that of
3695 its immediate dominator. */
3696 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3697 if (dom)
3699 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3700 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3703 /* Generate values for PHI nodes. */
3704 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3705 gsi_next (&gsi))
3707 tree result = gimple_phi_result (gsi.phi ());
3709 /* We have no need for virtual phis, as they don't represent
3710 actual computations. */
3711 if (virtual_operand_p (result))
3713 BB_LIVE_VOP_ON_EXIT (block) = result;
3714 continue;
3717 pre_expr e = get_or_alloc_expr_for_name (result);
3718 add_to_value (get_expr_value_id (e), e);
3719 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3720 bitmap_insert_into_set (PHI_GEN (block), e);
3723 BB_MAY_NOTRETURN (block) = 0;
3725 /* Now compute value numbers and populate value sets with all
3726 the expressions computed in BLOCK. */
3727 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3728 gsi_next (&gsi))
3730 ssa_op_iter iter;
3731 tree op;
3733 stmt = gsi_stmt (gsi);
3735 /* Cache whether the basic-block has any non-visible side-effect
3736 or control flow.
3737 If this isn't a call or it is the last stmt in the
3738 basic-block then the CFG represents things correctly. */
3739 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3741 /* Non-looping const functions always return normally.
3742 Otherwise the call might not return or have side-effects
3743 that forbid hoisting possibly trapping expressions
3744 before it. */
3745 int flags = gimple_call_flags (stmt);
3746 if (!(flags & ECF_CONST)
3747 || (flags & ECF_LOOPING_CONST_OR_PURE))
3748 BB_MAY_NOTRETURN (block) = 1;
3751 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3753 pre_expr e = get_or_alloc_expr_for_name (op);
3755 add_to_value (get_expr_value_id (e), e);
3756 bitmap_insert_into_set (TMP_GEN (block), e);
3757 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3760 if (gimple_vdef (stmt))
3761 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3763 if (gimple_has_side_effects (stmt)
3764 || stmt_could_throw_p (stmt)
3765 || is_gimple_debug (stmt))
3766 continue;
3768 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3770 if (ssa_undefined_value_p (op))
3771 continue;
3772 pre_expr e = get_or_alloc_expr_for_name (op);
3773               bitmap_value_insert_into_set (EXP_GEN (block), e);
3774             }
3775
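          /* So for a statement like
               x_1 = y_2 + z_3;
             the definition x_1 went to TMP_GEN and AVAIL_OUT above,
             the uses y_2 and z_3 just landed in EXP_GEN, and the
             expression y_2 + z_3 itself is handled below.  */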
3776           switch (gimple_code (stmt))
3777             {
3778 case GIMPLE_RETURN:
3779 continue;
3781             case GIMPLE_CALL:
3782               {
3783 vn_reference_t ref;
3784 vn_reference_s ref1;
3785 pre_expr result = NULL;
3787 /* We can value number only calls to real functions. */
3788 if (gimple_call_internal_p (stmt))
3789 continue;
3791 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3792 if (!ref)
3793 continue;
3795 /* If the value of the call is not invalidated in
3796 this block until it is computed, add the expression
3797 to EXP_GEN. */
3798 if (!gimple_vuse (stmt)
3799 || gimple_code
3800 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3801 || gimple_bb (SSA_NAME_DEF_STMT
3802                                 (gimple_vuse (stmt))) != block)
3803                   {
3804 result = pre_expr_pool.allocate ();
3805 result->kind = REFERENCE;
3806 result->id = 0;
3807 PRE_EXPR_REFERENCE (result) = ref;
3809 get_or_alloc_expression_id (result);
3810 add_to_value (get_expr_value_id (result), result);
3811                     bitmap_value_insert_into_set (EXP_GEN (block), result);
3812                   }
3813                 continue;
3814               }
3815
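            /* The lookup above gives two calls of, say, the same pure
               function with the same arguments and an unchanged memory
               state the same value-id, which is what later allows the
               second call to be proven redundant.  */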
3816             case GIMPLE_ASSIGN:
3817               {
3818                 pre_expr result = NULL;
3819                 switch (vn_get_stmt_kind (stmt))
3820                   {
3821                   case VN_NARY:
3822                     {
3823 enum tree_code code = gimple_assign_rhs_code (stmt);
3824 vn_nary_op_t nary;
3826 /* COND_EXPR and VEC_COND_EXPR are awkward in
3827 that they contain an embedded complex expression.
3828 Don't even try to shove those through PRE. */
3829 if (code == COND_EXPR
3830 || code == VEC_COND_EXPR)
3831 continue;
3833 vn_nary_op_lookup_stmt (stmt, &nary);
3834 if (!nary)
3835 continue;
3837                       /* If the NARY traps and there was a preceding
3838                          point in the block that might not return,
3839                          avoid adding the NARY to EXP_GEN.  */
3840 if (BB_MAY_NOTRETURN (block)
3841 && vn_nary_may_trap (nary))
3842 continue;
3844 result = pre_expr_pool.allocate ();
3845 result->kind = NARY;
3846 result->id = 0;
3847 PRE_EXPR_NARY (result) = nary;
3848                       break;
3849                     }
3850
3851                   case VN_REFERENCE:
3852                     {
3853 tree rhs1 = gimple_assign_rhs1 (stmt);
3854 alias_set_type set = get_alias_set (rhs1);
3855 vec<vn_reference_op_s> operands
3856 = vn_reference_operands_for_lookup (rhs1);
3857 vn_reference_t ref;
3858 vn_reference_lookup_pieces (gimple_vuse (stmt), set,
3859 TREE_TYPE (rhs1),
3860 operands, &ref, VN_WALK);
3861                       if (!ref)
3862                         {
3863                           operands.release ();
3864                           continue;
3865                         }
3866
3867 /* If the value of the reference is not invalidated in
3868 this block until it is computed, add the expression
3869 to EXP_GEN. */
3870                       if (gimple_vuse (stmt))
3871                         {
3872 gimple *def_stmt;
3873 bool ok = true;
3874 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3875 while (!gimple_nop_p (def_stmt)
3876 && gimple_code (def_stmt) != GIMPLE_PHI
3877                                  && gimple_bb (def_stmt) == block)
3878                             {
3879                               if (stmt_may_clobber_ref_p
3880                                     (def_stmt, gimple_assign_rhs1 (stmt)))
3881                                 {
3882                                   ok = false;
3883                                   break;
3884                                 }
3885                               def_stmt
3886                                 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3887                             }
3888                           if (!ok)
3889                             {
3890                               operands.release ();
3891                               continue;
3892                             }
3893                         }
3894
3895                       /* If the load was value-numbered to another
3896                          load, make sure we do not use its expression
3897                          for insertion if it wouldn't be a valid
3898                          replacement.  */
3899                       /* At the moment we have a testcase
3900                          for hoist insertion of aligned vs. misaligned
3901                          variants in gcc.dg/torture/pr65270-1.c, thus
3902                          with just alignment to be considered we can
3903                          simply replace the expression in the hashtable
3904                          with the most conservative one.  */
3905 vn_reference_op_t ref1 = &ref->operands.last ();
3906 while (ref1->opcode != TARGET_MEM_REF
3907 && ref1->opcode != MEM_REF
3908 && ref1 != &ref->operands[0])
3909 --ref1;
3910 vn_reference_op_t ref2 = &operands.last ();
3911 while (ref2->opcode != TARGET_MEM_REF
3912 && ref2->opcode != MEM_REF
3913 && ref2 != &operands[0])
3914 --ref2;
3915 if ((ref1->opcode == TARGET_MEM_REF
3916 || ref1->opcode == MEM_REF)
3917 && (TYPE_ALIGN (ref1->type)
3918 > TYPE_ALIGN (ref2->type)))
3919 ref1->type
3920 = build_aligned_type (ref1->type,
3921 TYPE_ALIGN (ref2->type));
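                      /* That is, if the hashtable variant assumes a
                         bigger alignment than the statement at hand
                         provides, demote the hashtable entry to the
                         smaller alignment so it is valid for both.  */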
3922                       /* TBAA behavior is an observable part of the value, so
3923                          make sure that the hashtable entry covers this stmt
3924                          too, by adjusting the ref alias set and its base.  */
3925                       if (ref->set == set
3926                           || alias_set_subset_of (set, ref->set))
3927                         ;
3928                       else if (alias_set_subset_of (ref->set, set))
3929                         {
3930 ref->set = set;
3931 if (ref1->opcode == MEM_REF)
3932 ref1->op0
3933 = wide_int_to_tree (TREE_TYPE (ref2->op0),
3934 wi::to_wide (ref1->op0));
3935 else
3936 ref1->op2
3937 = wide_int_to_tree (TREE_TYPE (ref2->op2),
3938                                                   wi::to_wide (ref1->op2));
3939                         }
3940                       else
3941                         {
3942 ref->set = 0;
3943 if (ref1->opcode == MEM_REF)
3944 ref1->op0
3945 = wide_int_to_tree (ptr_type_node,
3946 wi::to_wide (ref1->op0));
3947 else
3948 ref1->op2
3949 = wide_int_to_tree (ptr_type_node,
3950                                                   wi::to_wide (ref1->op2));
3951                         }
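                      /* Summarizing the three cases above: if the
                         hashtable ref's alias set already covers this
                         statement's set, nothing changes; if this
                         statement's set covers the hashtable one, the
                         entry is widened to it; if the two are
                         unordered, fall back to alias set zero, which
                         conflicts with everything.  */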
3952 operands.release ();
3954 result = pre_expr_pool.allocate ();
3955 result->kind = REFERENCE;
3956 result->id = 0;
3957 PRE_EXPR_REFERENCE (result) = ref;
3958                       break;
3959                     }
3960
3961                   default:
3962                     continue;
3963                   }
3964
3965 get_or_alloc_expression_id (result);
3966 add_to_value (get_expr_value_id (result), result);
3967 bitmap_value_insert_into_set (EXP_GEN (block), result);
3968                 continue;
3969               }
3970             default:
3971               break;
3972             }
3973         }
3974
3975       if (dump_file && (dump_flags & TDF_DETAILS))
3976         {
3977 print_bitmap_set (dump_file, EXP_GEN (block),
3978 "exp_gen", block->index);
3979 print_bitmap_set (dump_file, PHI_GEN (block),
3980 "phi_gen", block->index);
3981 print_bitmap_set (dump_file, TMP_GEN (block),
3982 "tmp_gen", block->index);
3983 print_bitmap_set (dump_file, AVAIL_OUT (block),
3984 "avail_out", block->index);
3987 /* Put the dominator children of BLOCK on the worklist of blocks
3988 to compute available sets for. */
3989 for (son = first_dom_son (CDI_DOMINATORS, block);
3990 son;
3991 son = next_dom_son (CDI_DOMINATORS, son))
3992         worklist[sp++] = son;
3993     }
3994
3995   free (worklist);
3996 }
3997
3998 /* Cheap DCE of a known set of possibly dead stmts.
3999
4000    Because we don't follow exactly the standard PRE algorithm, and decide not
4001 to insert PHI nodes sometimes, and because value numbering of casts isn't
4002 perfect, we sometimes end up inserting dead code. This simple DCE-like
4003 pass removes any insertions we made that weren't actually used. */
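/* For example, if insertion created
     pretmp_5 = a_1 + b_2;
   but elimination never rewrote any user to pretmp_5, the definition
   has zero uses and is deleted again by this pass.  */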
4005 static void
4006 remove_dead_inserted_code (void)
4007 {
4008   /* ???  We re-use inserted_exprs as the worklist, not just as the
4009      initial set, so this may end up removing non-inserted code as
4010      well.  If we kept inserted_exprs unchanged, we could restrict
4011      new worklist elements to members of inserted_exprs.  */
4012 bitmap worklist = inserted_exprs;
4013   while (! bitmap_empty_p (worklist))
4014     {
4015 /* Pop item. */
4016 unsigned i = bitmap_first_set_bit (worklist);
4017 bitmap_clear_bit (worklist, i);
4019 tree def = ssa_name (i);
4020 /* Removed by somebody else or still in use. */
4021 if (! def || ! has_zero_uses (def))
4022 continue;
4024 gimple *t = SSA_NAME_DEF_STMT (def);
4025 if (gimple_has_side_effects (t))
4026 continue;
4028 /* Add uses to the worklist. */
4029 ssa_op_iter iter;
4030 use_operand_p use_p;
4031       FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
4032         {
4033 tree use = USE_FROM_PTR (use_p);
4034 if (TREE_CODE (use) == SSA_NAME
4035 && ! SSA_NAME_IS_DEFAULT_DEF (use))
4036             bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4037         }
4038
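      /* Operands of the statement removed below may become dead
         themselves, so they went back on the worklist above, making
         the cleanup transitive like a standard worklist DCE.  */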
4039 /* Remove stmt. */
4040       if (dump_file && (dump_flags & TDF_DETAILS))
4041         {
4042 fprintf (dump_file, "Removing unnecessary insertion:");
4043           print_gimple_stmt (dump_file, t, 0);
4044         }
4045 gimple_stmt_iterator gsi = gsi_for_stmt (t);
4046 if (gimple_code (t) == GIMPLE_PHI)
4047 remove_phi_node (&gsi, true);
4048       else
4049         {
4050 gsi_remove (&gsi, true);
4051           gsi_remove (&gsi, true) /* see below */, release_defs (t);
4052         }
4053     }
4054 }
4055
4056
4057 /* Initialize data structures used by PRE. */
4059 static void
4060 init_pre (void)
4061 {
4062 basic_block bb;
4064 next_expression_id = 1;
4065 expressions.create (0);
4066 expressions.safe_push (NULL);
4067 value_expressions.create (get_max_value_id () + 1);
4068 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4069 name_to_id.create (0);
4071 inserted_exprs = BITMAP_ALLOC (NULL);
4073 connect_infinite_loops_to_exit ();
4074 memset (&pre_stats, 0, sizeof (pre_stats));
4076 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4078 calculate_dominance_info (CDI_DOMINATORS);
4080 bitmap_obstack_initialize (&grand_bitmap_obstack);
4081 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4082 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4083   FOR_ALL_BB_FN (bb, cfun)
4084     {
4085 EXP_GEN (bb) = bitmap_set_new ();
4086 PHI_GEN (bb) = bitmap_set_new ();
4087 TMP_GEN (bb) = bitmap_set_new ();
4088       AVAIL_OUT (bb) = bitmap_set_new ();
4089     }
4090 }
4091
4092
4093 /* Deallocate data structures used by PRE. */
4095 static void
4096 fini_pre ()
4097 {
4098 value_expressions.release ();
4099 expressions.release ();
4100 BITMAP_FREE (inserted_exprs);
4101 bitmap_obstack_release (&grand_bitmap_obstack);
4102 bitmap_set_pool.release ();
4103 pre_expr_pool.release ();
4104 delete phi_translate_table;
4105 phi_translate_table = NULL;
4106 delete expression_to_id;
4107 expression_to_id = NULL;
4108 name_to_id.release ();
4110   free_aux_for_blocks ();
4111 }
4112
4113 namespace {
4115 const pass_data pass_data_pre =
4116 {
4117 GIMPLE_PASS, /* type */
4118 "pre", /* name */
4119 OPTGROUP_NONE, /* optinfo_flags */
4120 TV_TREE_PRE, /* tv_id */
4121 ( PROP_cfg | PROP_ssa ), /* properties_required */
4122 0, /* properties_provided */
4123 0, /* properties_destroyed */
4124 TODO_rebuild_alias, /* todo_flags_start */
4125   0, /* todo_flags_finish */
4126 };
4127
4128 class pass_pre : public gimple_opt_pass
4129 {
4130 public:
4131 pass_pre (gcc::context *ctxt)
4132     : gimple_opt_pass (pass_data_pre, ctxt)
4133   {}
4134
4135 /* opt_pass methods: */
4136 virtual bool gate (function *)
4137 { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
4138 virtual unsigned int execute (function *);
4140 }; // class pass_pre
4142 unsigned int
4143 pass_pre::execute (function *fun)
4144 {
4145 unsigned int todo = 0;
4147 do_partial_partial =
4148 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4150 /* This has to happen before SCCVN runs because
4151 loop_optimizer_init may create new phis, etc. */
4152 loop_optimizer_init (LOOPS_NORMAL);
4153 split_critical_edges ();
4154 scev_initialize ();
4156 run_scc_vn (VN_WALK);
4158 init_pre ();
4160   /* Insert can get quite slow on an incredibly large number of basic
4161      blocks due to some quadratic behavior.  Until this behavior is
4162      fixed, don't run it when we have an incredibly large number of
4163      bb's.  If we aren't going to run insert, there is no point in
4164      computing ANTIC either, even though it's plenty fast, nor do we
4165      require AVAIL.  */
4166   if (n_basic_blocks_for_fn (fun) < 4000)
4167     {
4168 compute_avail ();
4169 compute_antic ();
4170       insert ();
4171     }
4172
4173 /* Make sure to remove fake edges before committing our inserts.
4174 This makes sure we don't end up with extra critical edges that
4175 we would need to split. */
4176 remove_fake_exit_edges ();
4177 gsi_commit_edge_inserts ();
4179   /* Elimination folds statements, which might (should not...) end
4180      up not keeping virtual operands up-to-date.  */
4181 gcc_assert (!need_ssa_update_p (fun));
4183 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4184 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4185 statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
4186 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4188 /* Remove all the redundant expressions. */
4189 todo |= vn_eliminate (inserted_exprs);
4191 remove_dead_inserted_code ();
4193 fini_pre ();
4195 scev_finalize ();
4196 loop_optimizer_finalize ();
4198 /* Restore SSA info before tail-merging as that resets it as well. */
4199 scc_vn_restore_ssa_info ();
4201 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4202 case we can merge the block with the remaining predecessor of the block.
4203 It should either:
4204 - call merge_blocks after each tail merge iteration
4205 - call merge_blocks after all tail merge iterations
4206 - mark TODO_cleanup_cfg when necessary
4207 - share the cfg cleanup with fini_pre. */
4208 todo |= tail_merge_optimize (todo);
4210 free_scc_vn ();
4212 /* Tail merging invalidates the virtual SSA web, together with
4213 cfg-cleanup opportunities exposed by PRE this will wreck the
4214 SSA updating machinery. So make sure to run update-ssa
4215 manually, before eventually scheduling cfg-cleanup as part of
4216 the todo. */
4217 update_ssa (TODO_update_ssa_only_virtuals);
4219   return todo;
4220 }
4221
4222 } // anon namespace
4224 gimple_opt_pass *
4225 make_pass_pre (gcc::context *ctxt)
4226 {
4227   return new pass_pre (ctxt);
4228 }