Daily bump.
[official-gcc.git] / gcc / tree-ssa-pre.c
blob559e8df7c959d327b79d757704127d5b733726c3
1 /* SSA-PRE for trees.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "alloc-pool.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "fold-const.h"
36 #include "cfganal.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "tree-ssa-loop.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-ssa.h"
46 #include "cfgloop.h"
47 #include "tree-ssa-sccvn.h"
48 #include "tree-scalar-evolution.h"
49 #include "params.h"
50 #include "dbgcnt.h"
51 #include "domwalk.h"
52 #include "tree-ssa-propagate.h"
53 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
55 #include "langhooks.h"
56 #include "alias.h"
58 /* TODO:
60 1. Avail sets can be shared by making an avail_find_leader that
61 walks up the dominator tree and looks in those avail sets.
62 This might affect code optimality, it's unclear right now.
63 2. Strength reduction can be performed by anticipating expressions
64 we can repair later on.
65 3. We can do back-substitution or smarter value numbering to catch
66 commutative expressions split up over multiple statements.
69 /* For ease of terminology, "expression node" in the below refers to
70 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
71 represent the actual statement containing the expressions we care about,
72 and we cache the value number by putting it in the expression. */
74 /* Basic algorithm
76 First we walk the statements to generate the AVAIL sets, the
77 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
78 generation of values/expressions by a given block. We use them
79 when computing the ANTIC sets. The AVAIL sets consist of
80 SSA_NAME's that represent values, so we know what values are
81 available in what blocks. AVAIL is a forward dataflow problem. In
82 SSA, values are never killed, so we don't need a kill set, or a
83 fixpoint iteration, in order to calculate the AVAIL sets. In
84 traditional parlance, AVAIL sets tell us the downsafety of the
85 expressions/values.
87 Next, we generate the ANTIC sets. These sets represent the
88 anticipatable expressions. ANTIC is a backwards dataflow
89 problem. An expression is anticipatable in a given block if it could
90 be generated in that block. This means that if we had to perform
91 an insertion in that block, of the value of that expression, we
92 could. Calculating the ANTIC sets requires phi translation of
93 expressions, because the flow goes backwards through phis. We must
94 iterate to a fixpoint of the ANTIC sets, because we have a kill
95 set. Even in SSA form, values are not live over the entire
96 function, only from their definition point onwards. So we have to
97 remove values from the ANTIC set once we go past the definition
98 point of the leaders that make them up.
99 compute_antic/compute_antic_aux performs this computation.
101 Third, we perform insertions to make partially redundant
102 expressions fully redundant.
104 An expression is partially redundant (excluding partial
105 anticipation) if:
107 1. It is AVAIL in some, but not all, of the predecessors of a
108 given block.
109 2. It is ANTIC in all the predecessors.
111 In order to make it fully redundant, we insert the expression into
112 the predecessors where it is not available, but is ANTIC.
114 For the partial anticipation case, we only perform insertion if it
115 is partially anticipated in some block, and fully available in all
116 of the predecessors.
118 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
119 performs these steps.
121 Fourth, we eliminate fully redundant expressions.
122 This is a simple statement walk that replaces redundant
123 calculations with the now available values. */
125 /* Representations of value numbers:
127 Value numbers are represented by a representative SSA_NAME. We
128 will create fake SSA_NAME's in situations where we need a
129 representative but do not have one (because it is a complex
130 expression). In order to facilitate storing the value numbers in
131 bitmaps, and keep the number of wasted SSA_NAME's down, we also
132 associate a value_id with each value number, and create full blown
133 ssa_name's only where we actually need them (IE in operands of
134 existing expressions).
136 Theoretically you could replace all the value_id's with
137 SSA_NAME_VERSION, but this would allocate a large number of
138 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
139 It would also require an additional indirection at each point we
140 use the value id. */
142 /* Representation of expressions on value numbers:
144 Expressions consisting of value numbers are represented the same
145 way as our VN internally represents them, with an additional
146 "pre_expr" wrapping around them in order to facilitate storing all
147 of the expressions in the same sets. */
149 /* Representation of sets:
151 The dataflow sets do not need to be sorted in any particular order
152 for the majority of their lifetime, are simply represented as two
153 bitmaps, one that keeps track of values present in the set, and one
154 that keeps track of expressions present in the set.
156 When we need them in topological order, we produce it on demand by
157 transforming the bitmap into an array and sorting it into topo
158 order. */
160 /* Type of expression, used to know which member of the PRE_EXPR union
161 is valid. */
163 enum pre_expr_kind
165 NAME,
166 NARY,
167 REFERENCE,
168 CONSTANT
171 union pre_expr_union
173 tree name;
174 tree constant;
175 vn_nary_op_t nary;
176 vn_reference_t reference;
179 typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
181 enum pre_expr_kind kind;
182 unsigned int id;
183 pre_expr_union u;
185 /* hash_table support. */
186 static inline hashval_t hash (const pre_expr_d *);
187 static inline int equal (const pre_expr_d *, const pre_expr_d *);
188 } *pre_expr;
190 #define PRE_EXPR_NAME(e) (e)->u.name
191 #define PRE_EXPR_NARY(e) (e)->u.nary
192 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
193 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
195 /* Compare E1 and E1 for equality. */
197 inline int
198 pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
200 if (e1->kind != e2->kind)
201 return false;
203 switch (e1->kind)
205 case CONSTANT:
206 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
207 PRE_EXPR_CONSTANT (e2));
208 case NAME:
209 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
210 case NARY:
211 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
212 case REFERENCE:
213 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
214 PRE_EXPR_REFERENCE (e2));
215 default:
216 gcc_unreachable ();
220 /* Hash E. */
222 inline hashval_t
223 pre_expr_d::hash (const pre_expr_d *e)
225 switch (e->kind)
227 case CONSTANT:
228 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
229 case NAME:
230 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
231 case NARY:
232 return PRE_EXPR_NARY (e)->hashcode;
233 case REFERENCE:
234 return PRE_EXPR_REFERENCE (e)->hashcode;
235 default:
236 gcc_unreachable ();
240 /* Next global expression id number. */
241 static unsigned int next_expression_id;
243 /* Mapping from expression to id number we can use in bitmap sets. */
244 static vec<pre_expr> expressions;
245 static hash_table<pre_expr_d> *expression_to_id;
246 static vec<unsigned> name_to_id;
248 /* Allocate an expression id for EXPR. */
250 static inline unsigned int
251 alloc_expression_id (pre_expr expr)
253 struct pre_expr_d **slot;
254 /* Make sure we won't overflow. */
255 gcc_assert (next_expression_id + 1 > next_expression_id);
256 expr->id = next_expression_id++;
257 expressions.safe_push (expr);
258 if (expr->kind == NAME)
260 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
261 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
262 re-allocations by using vec::reserve upfront. */
263 unsigned old_len = name_to_id.length ();
264 name_to_id.reserve (num_ssa_names - old_len);
265 name_to_id.quick_grow_cleared (num_ssa_names);
266 gcc_assert (name_to_id[version] == 0);
267 name_to_id[version] = expr->id;
269 else
271 slot = expression_to_id->find_slot (expr, INSERT);
272 gcc_assert (!*slot);
273 *slot = expr;
275 return next_expression_id - 1;
278 /* Return the expression id for tree EXPR. */
280 static inline unsigned int
281 get_expression_id (const pre_expr expr)
283 return expr->id;
286 static inline unsigned int
287 lookup_expression_id (const pre_expr expr)
289 struct pre_expr_d **slot;
291 if (expr->kind == NAME)
293 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
294 if (name_to_id.length () <= version)
295 return 0;
296 return name_to_id[version];
298 else
300 slot = expression_to_id->find_slot (expr, NO_INSERT);
301 if (!slot)
302 return 0;
303 return ((pre_expr)*slot)->id;
307 /* Return the existing expression id for EXPR, or create one if one
308 does not exist yet. */
310 static inline unsigned int
311 get_or_alloc_expression_id (pre_expr expr)
313 unsigned int id = lookup_expression_id (expr);
314 if (id == 0)
315 return alloc_expression_id (expr);
316 return expr->id = id;
319 /* Return the expression that has expression id ID */
321 static inline pre_expr
322 expression_for_id (unsigned int id)
324 return expressions[id];
327 /* Free the expression id field in all of our expressions,
328 and then destroy the expressions array. */
330 static void
331 clear_expression_ids (void)
333 expressions.release ();
336 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
338 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
340 static pre_expr
341 get_or_alloc_expr_for_name (tree name)
343 struct pre_expr_d expr;
344 pre_expr result;
345 unsigned int result_id;
347 expr.kind = NAME;
348 expr.id = 0;
349 PRE_EXPR_NAME (&expr) = name;
350 result_id = lookup_expression_id (&expr);
351 if (result_id != 0)
352 return expression_for_id (result_id);
354 result = pre_expr_pool.allocate ();
355 result->kind = NAME;
356 PRE_EXPR_NAME (result) = name;
357 alloc_expression_id (result);
358 return result;
361 /* An unordered bitmap set. One bitmap tracks values, the other,
362 expressions. */
363 typedef struct bitmap_set
365 bitmap_head expressions;
366 bitmap_head values;
367 } *bitmap_set_t;
369 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
370 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
372 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
373 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
375 /* Mapping from value id to expressions with that value_id. */
376 static vec<bitmap> value_expressions;
378 /* Sets that we need to keep track of. */
379 typedef struct bb_bitmap_sets
381 /* The EXP_GEN set, which represents expressions/values generated in
382 a basic block. */
383 bitmap_set_t exp_gen;
385 /* The PHI_GEN set, which represents PHI results generated in a
386 basic block. */
387 bitmap_set_t phi_gen;
389 /* The TMP_GEN set, which represents results/temporaries generated
390 in a basic block. IE the LHS of an expression. */
391 bitmap_set_t tmp_gen;
393 /* The AVAIL_OUT set, which represents which values are available in
394 a given basic block. */
395 bitmap_set_t avail_out;
397 /* The ANTIC_IN set, which represents which values are anticipatable
398 in a given basic block. */
399 bitmap_set_t antic_in;
401 /* The PA_IN set, which represents which values are
402 partially anticipatable in a given basic block. */
403 bitmap_set_t pa_in;
405 /* The NEW_SETS set, which is used during insertion to augment the
406 AVAIL_OUT set of blocks with the new insertions performed during
407 the current iteration. */
408 bitmap_set_t new_sets;
410 /* A cache for value_dies_in_block_x. */
411 bitmap expr_dies;
413 /* The live virtual operand on successor edges. */
414 tree vop_on_exit;
416 /* True if we have visited this block during ANTIC calculation. */
417 unsigned int visited : 1;
419 /* True when the block contains a call that might not return. */
420 unsigned int contains_may_not_return_call : 1;
421 } *bb_value_sets_t;
423 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
424 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
425 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
426 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
427 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
428 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
429 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
430 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
431 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
432 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
433 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
436 /* Basic block list in postorder. */
437 static int *postorder;
438 static int postorder_num;
440 /* This structure is used to keep track of statistics on what
441 optimization PRE was able to perform. */
442 static struct
444 /* The number of RHS computations eliminated by PRE. */
445 int eliminations;
447 /* The number of new expressions/temporaries generated by PRE. */
448 int insertions;
450 /* The number of inserts found due to partial anticipation */
451 int pa_insert;
453 /* The number of new PHI nodes added by PRE. */
454 int phis;
455 } pre_stats;
457 static bool do_partial_partial;
458 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
459 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
460 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
461 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
462 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
463 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
464 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
465 unsigned int, bool);
466 static bitmap_set_t bitmap_set_new (void);
467 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
468 tree);
469 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
470 static unsigned int get_expr_value_id (pre_expr);
472 /* We can add and remove elements and entries to and from sets
473 and hash tables, so we use alloc pools for them. */
475 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
476 static bitmap_obstack grand_bitmap_obstack;
478 /* Set of blocks with statements that have had their EH properties changed. */
479 static bitmap need_eh_cleanup;
481 /* Set of blocks with statements that have had their AB properties changed. */
482 static bitmap need_ab_cleanup;
484 /* A three tuple {e, pred, v} used to cache phi translations in the
485 phi_translate_table. */
487 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
489 /* The expression. */
490 pre_expr e;
492 /* The predecessor block along which we translated the expression. */
493 basic_block pred;
495 /* The value that resulted from the translation. */
496 pre_expr v;
498 /* The hashcode for the expression, pred pair. This is cached for
499 speed reasons. */
500 hashval_t hashcode;
502 /* hash_table support. */
503 static inline hashval_t hash (const expr_pred_trans_d *);
504 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
505 } *expr_pred_trans_t;
506 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
508 inline hashval_t
509 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
511 return e->hashcode;
514 inline int
515 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
516 const expr_pred_trans_d *ve2)
518 basic_block b1 = ve1->pred;
519 basic_block b2 = ve2->pred;
521 /* If they are not translations for the same basic block, they can't
522 be equal. */
523 if (b1 != b2)
524 return false;
525 return pre_expr_d::equal (ve1->e, ve2->e);
528 /* The phi_translate_table caches phi translations for a given
529 expression and predecessor. */
530 static hash_table<expr_pred_trans_d> *phi_translate_table;
532 /* Add the tuple mapping from {expression E, basic block PRED} to
533 the phi translation table and return whether it pre-existed. */
535 static inline bool
536 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
538 expr_pred_trans_t *slot;
539 expr_pred_trans_d tem;
540 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
541 pred->index);
542 tem.e = e;
543 tem.pred = pred;
544 tem.hashcode = hash;
545 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
546 if (*slot)
548 *entry = *slot;
549 return true;
552 *entry = *slot = XNEW (struct expr_pred_trans_d);
553 (*entry)->e = e;
554 (*entry)->pred = pred;
555 (*entry)->hashcode = hash;
556 return false;
560 /* Add expression E to the expression set of value id V. */
562 static void
563 add_to_value (unsigned int v, pre_expr e)
565 bitmap set;
567 gcc_checking_assert (get_expr_value_id (e) == v);
569 if (v >= value_expressions.length ())
571 value_expressions.safe_grow_cleared (v + 1);
574 set = value_expressions[v];
575 if (!set)
577 set = BITMAP_ALLOC (&grand_bitmap_obstack);
578 value_expressions[v] = set;
581 bitmap_set_bit (set, get_or_alloc_expression_id (e));
584 /* Create a new bitmap set and return it. */
586 static bitmap_set_t
587 bitmap_set_new (void)
589 bitmap_set_t ret = bitmap_set_pool.allocate ();
590 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
591 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
592 return ret;
595 /* Return the value id for a PRE expression EXPR. */
597 static unsigned int
598 get_expr_value_id (pre_expr expr)
600 unsigned int id;
601 switch (expr->kind)
603 case CONSTANT:
604 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
605 break;
606 case NAME:
607 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
608 break;
609 case NARY:
610 id = PRE_EXPR_NARY (expr)->value_id;
611 break;
612 case REFERENCE:
613 id = PRE_EXPR_REFERENCE (expr)->value_id;
614 break;
615 default:
616 gcc_unreachable ();
618 /* ??? We cannot assert that expr has a value-id (it can be 0), because
619 we assign value-ids only to expressions that have a result
620 in set_hashtable_value_ids. */
621 return id;
624 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
626 static tree
627 sccvn_valnum_from_value_id (unsigned int val)
629 bitmap_iterator bi;
630 unsigned int i;
631 bitmap exprset = value_expressions[val];
632 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
634 pre_expr vexpr = expression_for_id (i);
635 if (vexpr->kind == NAME)
636 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
637 else if (vexpr->kind == CONSTANT)
638 return PRE_EXPR_CONSTANT (vexpr);
640 return NULL_TREE;
643 /* Remove an expression EXPR from a bitmapped set. */
645 static void
646 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
648 unsigned int val = get_expr_value_id (expr);
649 if (!value_id_constant_p (val))
651 bitmap_clear_bit (&set->values, val);
652 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
656 static void
657 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
658 unsigned int val, bool allow_constants)
660 if (allow_constants || !value_id_constant_p (val))
662 /* We specifically expect this and only this function to be able to
663 insert constants into a set. */
664 bitmap_set_bit (&set->values, val);
665 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
669 /* Insert an expression EXPR into a bitmapped set. */
671 static void
672 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
674 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
677 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
679 static void
680 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
682 bitmap_copy (&dest->expressions, &orig->expressions);
683 bitmap_copy (&dest->values, &orig->values);
687 /* Free memory used up by SET. */
688 static void
689 bitmap_set_free (bitmap_set_t set)
691 bitmap_clear (&set->expressions);
692 bitmap_clear (&set->values);
696 /* Generate an topological-ordered array of bitmap set SET. */
698 static vec<pre_expr>
699 sorted_array_from_bitmap_set (bitmap_set_t set)
701 unsigned int i, j;
702 bitmap_iterator bi, bj;
703 vec<pre_expr> result;
705 /* Pre-allocate enough space for the array. */
706 result.create (bitmap_count_bits (&set->expressions));
708 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
710 /* The number of expressions having a given value is usually
711 relatively small. Thus, rather than making a vector of all
712 the expressions and sorting it by value-id, we walk the values
713 and check in the reverse mapping that tells us what expressions
714 have a given value, to filter those in our set. As a result,
715 the expressions are inserted in value-id order, which means
716 topological order.
718 If this is somehow a significant lose for some cases, we can
719 choose which set to walk based on the set size. */
720 bitmap exprset = value_expressions[i];
721 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
723 if (bitmap_bit_p (&set->expressions, j))
724 result.quick_push (expression_for_id (j));
728 return result;
731 /* Perform bitmapped set operation DEST &= ORIG. */
733 static void
734 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
736 bitmap_iterator bi;
737 unsigned int i;
739 if (dest != orig)
741 bitmap_head temp;
742 bitmap_initialize (&temp, &grand_bitmap_obstack);
744 bitmap_and_into (&dest->values, &orig->values);
745 bitmap_copy (&temp, &dest->expressions);
746 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
748 pre_expr expr = expression_for_id (i);
749 unsigned int value_id = get_expr_value_id (expr);
750 if (!bitmap_bit_p (&dest->values, value_id))
751 bitmap_clear_bit (&dest->expressions, i);
753 bitmap_clear (&temp);
757 /* Subtract all values and expressions contained in ORIG from DEST. */
759 static bitmap_set_t
760 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
762 bitmap_set_t result = bitmap_set_new ();
763 bitmap_iterator bi;
764 unsigned int i;
766 bitmap_and_compl (&result->expressions, &dest->expressions,
767 &orig->expressions);
769 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
771 pre_expr expr = expression_for_id (i);
772 unsigned int value_id = get_expr_value_id (expr);
773 bitmap_set_bit (&result->values, value_id);
776 return result;
779 /* Subtract all the values in bitmap set B from bitmap set A. */
781 static void
782 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
784 unsigned int i;
785 bitmap_iterator bi;
786 bitmap_head temp;
788 bitmap_initialize (&temp, &grand_bitmap_obstack);
790 bitmap_copy (&temp, &a->expressions);
791 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
793 pre_expr expr = expression_for_id (i);
794 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
795 bitmap_remove_from_set (a, expr);
797 bitmap_clear (&temp);
801 /* Return true if bitmapped set SET contains the value VALUE_ID. */
803 static bool
804 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
806 if (value_id_constant_p (value_id))
807 return true;
809 if (!set || bitmap_empty_p (&set->expressions))
810 return false;
812 return bitmap_bit_p (&set->values, value_id);
815 static inline bool
816 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
818 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
821 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
823 static void
824 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
825 const pre_expr expr)
827 bitmap exprset;
828 unsigned int i;
829 bitmap_iterator bi;
831 if (value_id_constant_p (lookfor))
832 return;
834 if (!bitmap_set_contains_value (set, lookfor))
835 return;
837 /* The number of expressions having a given value is usually
838 significantly less than the total number of expressions in SET.
839 Thus, rather than check, for each expression in SET, whether it
840 has the value LOOKFOR, we walk the reverse mapping that tells us
841 what expressions have a given value, and see if any of those
842 expressions are in our set. For large testcases, this is about
843 5-10x faster than walking the bitmap. If this is somehow a
844 significant lose for some cases, we can choose which set to walk
845 based on the set size. */
846 exprset = value_expressions[lookfor];
847 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
849 if (bitmap_clear_bit (&set->expressions, i))
851 bitmap_set_bit (&set->expressions, get_expression_id (expr));
852 return;
856 gcc_unreachable ();
859 /* Return true if two bitmap sets are equal. */
861 static bool
862 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
864 return bitmap_equal_p (&a->values, &b->values);
867 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
868 and add it otherwise. */
870 static void
871 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
873 unsigned int val = get_expr_value_id (expr);
875 if (bitmap_set_contains_value (set, val))
876 bitmap_set_replace_value (set, val, expr);
877 else
878 bitmap_insert_into_set (set, expr);
881 /* Insert EXPR into SET if EXPR's value is not already present in
882 SET. */
884 static void
885 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
887 unsigned int val = get_expr_value_id (expr);
889 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
891 /* Constant values are always considered to be part of the set. */
892 if (value_id_constant_p (val))
893 return;
895 /* If the value membership changed, add the expression. */
896 if (bitmap_set_bit (&set->values, val))
897 bitmap_set_bit (&set->expressions, expr->id);
900 /* Print out EXPR to outfile. */
902 static void
903 print_pre_expr (FILE *outfile, const pre_expr expr)
905 switch (expr->kind)
907 case CONSTANT:
908 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
909 break;
910 case NAME:
911 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
912 break;
913 case NARY:
915 unsigned int i;
916 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
917 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
918 for (i = 0; i < nary->length; i++)
920 print_generic_expr (outfile, nary->op[i], 0);
921 if (i != (unsigned) nary->length - 1)
922 fprintf (outfile, ",");
924 fprintf (outfile, "}");
926 break;
928 case REFERENCE:
930 vn_reference_op_t vro;
931 unsigned int i;
932 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
933 fprintf (outfile, "{");
934 for (i = 0;
935 ref->operands.iterate (i, &vro);
936 i++)
938 bool closebrace = false;
939 if (vro->opcode != SSA_NAME
940 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
942 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
943 if (vro->op0)
945 fprintf (outfile, "<");
946 closebrace = true;
949 if (vro->op0)
951 print_generic_expr (outfile, vro->op0, 0);
952 if (vro->op1)
954 fprintf (outfile, ",");
955 print_generic_expr (outfile, vro->op1, 0);
957 if (vro->op2)
959 fprintf (outfile, ",");
960 print_generic_expr (outfile, vro->op2, 0);
963 if (closebrace)
964 fprintf (outfile, ">");
965 if (i != ref->operands.length () - 1)
966 fprintf (outfile, ",");
968 fprintf (outfile, "}");
969 if (ref->vuse)
971 fprintf (outfile, "@");
972 print_generic_expr (outfile, ref->vuse, 0);
975 break;
978 void debug_pre_expr (pre_expr);
980 /* Like print_pre_expr but always prints to stderr. */
981 DEBUG_FUNCTION void
982 debug_pre_expr (pre_expr e)
984 print_pre_expr (stderr, e);
985 fprintf (stderr, "\n");
988 /* Print out SET to OUTFILE. */
990 static void
991 print_bitmap_set (FILE *outfile, bitmap_set_t set,
992 const char *setname, int blockindex)
994 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
995 if (set)
997 bool first = true;
998 unsigned i;
999 bitmap_iterator bi;
1001 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1003 const pre_expr expr = expression_for_id (i);
1005 if (!first)
1006 fprintf (outfile, ", ");
1007 first = false;
1008 print_pre_expr (outfile, expr);
1010 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1013 fprintf (outfile, " }\n");
1016 void debug_bitmap_set (bitmap_set_t);
1018 DEBUG_FUNCTION void
1019 debug_bitmap_set (bitmap_set_t set)
1021 print_bitmap_set (stderr, set, "debug", 0);
1024 void debug_bitmap_sets_for (basic_block);
1026 DEBUG_FUNCTION void
1027 debug_bitmap_sets_for (basic_block bb)
1029 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1030 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1031 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1032 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1033 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1034 if (do_partial_partial)
1035 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1036 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1039 /* Print out the expressions that have VAL to OUTFILE. */
1041 static void
1042 print_value_expressions (FILE *outfile, unsigned int val)
1044 bitmap set = value_expressions[val];
1045 if (set)
1047 bitmap_set x;
1048 char s[10];
1049 sprintf (s, "%04d", val);
1050 x.expressions = *set;
1051 print_bitmap_set (outfile, &x, s, 0);
1056 DEBUG_FUNCTION void
1057 debug_value_expressions (unsigned int val)
1059 print_value_expressions (stderr, val);
1062 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1063 represent it. */
1065 static pre_expr
1066 get_or_alloc_expr_for_constant (tree constant)
1068 unsigned int result_id;
1069 unsigned int value_id;
1070 struct pre_expr_d expr;
1071 pre_expr newexpr;
1073 expr.kind = CONSTANT;
1074 PRE_EXPR_CONSTANT (&expr) = constant;
1075 result_id = lookup_expression_id (&expr);
1076 if (result_id != 0)
1077 return expression_for_id (result_id);
1079 newexpr = pre_expr_pool.allocate ();
1080 newexpr->kind = CONSTANT;
1081 PRE_EXPR_CONSTANT (newexpr) = constant;
1082 alloc_expression_id (newexpr);
1083 value_id = get_or_alloc_constant_value_id (constant);
1084 add_to_value (value_id, newexpr);
1085 return newexpr;
1088 /* Given a value id V, find the actual tree representing the constant
1089 value if there is one, and return it. Return NULL if we can't find
1090 a constant. */
1092 static tree
1093 get_constant_for_value_id (unsigned int v)
1095 if (value_id_constant_p (v))
1097 unsigned int i;
1098 bitmap_iterator bi;
1099 bitmap exprset = value_expressions[v];
1101 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1103 pre_expr expr = expression_for_id (i);
1104 if (expr->kind == CONSTANT)
1105 return PRE_EXPR_CONSTANT (expr);
1108 return NULL;
1111 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1112 Currently only supports constants and SSA_NAMES. */
1113 static pre_expr
1114 get_or_alloc_expr_for (tree t)
1116 if (TREE_CODE (t) == SSA_NAME)
1117 return get_or_alloc_expr_for_name (t);
1118 else if (is_gimple_min_invariant (t))
1119 return get_or_alloc_expr_for_constant (t);
1120 else
1122 /* More complex expressions can result from SCCVN expression
1123 simplification that inserts values for them. As they all
1124 do not have VOPs the get handled by the nary ops struct. */
1125 vn_nary_op_t result;
1126 unsigned int result_id;
1127 vn_nary_op_lookup (t, &result);
1128 if (result != NULL)
1130 pre_expr e = pre_expr_pool.allocate ();
1131 e->kind = NARY;
1132 PRE_EXPR_NARY (e) = result;
1133 result_id = lookup_expression_id (e);
1134 if (result_id != 0)
1136 pre_expr_pool.remove (e);
1137 e = expression_for_id (result_id);
1138 return e;
1140 alloc_expression_id (e);
1141 return e;
1144 return NULL;
1147 /* Return the folded version of T if T, when folded, is a gimple
1148 min_invariant. Otherwise, return T. */
1150 static pre_expr
1151 fully_constant_expression (pre_expr e)
1153 switch (e->kind)
1155 case CONSTANT:
1156 return e;
1157 case NARY:
1159 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1160 switch (TREE_CODE_CLASS (nary->opcode))
1162 case tcc_binary:
1163 case tcc_comparison:
1165 /* We have to go from trees to pre exprs to value ids to
1166 constants. */
1167 tree naryop0 = nary->op[0];
1168 tree naryop1 = nary->op[1];
1169 tree result;
1170 if (!is_gimple_min_invariant (naryop0))
1172 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1173 unsigned int vrep0 = get_expr_value_id (rep0);
1174 tree const0 = get_constant_for_value_id (vrep0);
1175 if (const0)
1176 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1178 if (!is_gimple_min_invariant (naryop1))
1180 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1181 unsigned int vrep1 = get_expr_value_id (rep1);
1182 tree const1 = get_constant_for_value_id (vrep1);
1183 if (const1)
1184 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1186 result = fold_binary (nary->opcode, nary->type,
1187 naryop0, naryop1);
1188 if (result && is_gimple_min_invariant (result))
1189 return get_or_alloc_expr_for_constant (result);
1190 /* We might have simplified the expression to a
1191 SSA_NAME for example from x_1 * 1. But we cannot
1192 insert a PHI for x_1 unconditionally as x_1 might
1193 not be available readily. */
1194 return e;
1196 case tcc_reference:
1197 if (nary->opcode != REALPART_EXPR
1198 && nary->opcode != IMAGPART_EXPR
1199 && nary->opcode != VIEW_CONVERT_EXPR)
1200 return e;
1201 /* Fallthrough. */
1202 case tcc_unary:
1204 /* We have to go from trees to pre exprs to value ids to
1205 constants. */
1206 tree naryop0 = nary->op[0];
1207 tree const0, result;
1208 if (is_gimple_min_invariant (naryop0))
1209 const0 = naryop0;
1210 else
1212 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1213 unsigned int vrep0 = get_expr_value_id (rep0);
1214 const0 = get_constant_for_value_id (vrep0);
1216 result = NULL;
1217 if (const0)
1219 tree type1 = TREE_TYPE (nary->op[0]);
1220 const0 = fold_convert (type1, const0);
1221 result = fold_unary (nary->opcode, nary->type, const0);
1223 if (result && is_gimple_min_invariant (result))
1224 return get_or_alloc_expr_for_constant (result);
1225 return e;
1227 default:
1228 return e;
1231 case REFERENCE:
1233 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1234 tree folded;
1235 if ((folded = fully_constant_vn_reference_p (ref)))
1236 return get_or_alloc_expr_for_constant (folded);
1237 return e;
1239 default:
1240 return e;
1242 return e;
1245 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1246 it has the value it would have in BLOCK. Set *SAME_VALID to true
1247 in case the new vuse doesn't change the value id of the OPERANDS. */
1249 static tree
1250 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1251 alias_set_type set, tree type, tree vuse,
1252 basic_block phiblock,
1253 basic_block block, bool *same_valid)
1255 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1256 ao_ref ref;
1257 edge e = NULL;
1258 bool use_oracle;
1260 *same_valid = true;
1262 if (gimple_bb (phi) != phiblock)
1263 return vuse;
1265 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1267 /* Use the alias-oracle to find either the PHI node in this block,
1268 the first VUSE used in this block that is equivalent to vuse or
1269 the first VUSE which definition in this block kills the value. */
1270 if (gimple_code (phi) == GIMPLE_PHI)
1271 e = find_edge (block, phiblock);
1272 else if (use_oracle)
1273 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1275 vuse = gimple_vuse (phi);
1276 phi = SSA_NAME_DEF_STMT (vuse);
1277 if (gimple_bb (phi) != phiblock)
1278 return vuse;
1279 if (gimple_code (phi) == GIMPLE_PHI)
1281 e = find_edge (block, phiblock);
1282 break;
1285 else
1286 return NULL_TREE;
1288 if (e)
1290 if (use_oracle)
1292 bitmap visited = NULL;
1293 unsigned int cnt;
1294 /* Try to find a vuse that dominates this phi node by skipping
1295 non-clobbering statements. */
1296 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1297 NULL, NULL);
1298 if (visited)
1299 BITMAP_FREE (visited);
1301 else
1302 vuse = NULL_TREE;
1303 if (!vuse)
1305 /* If we didn't find any, the value ID can't stay the same,
1306 but return the translated vuse. */
1307 *same_valid = false;
1308 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1310 /* ??? We would like to return vuse here as this is the canonical
1311 upmost vdef that this reference is associated with. But during
1312 insertion of the references into the hash tables we only ever
1313 directly insert with their direct gimple_vuse, hence returning
1314 something else would make us not find the other expression. */
1315 return PHI_ARG_DEF (phi, e->dest_idx);
1318 return NULL_TREE;
1321 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1322 SET2. This is used to avoid making a set consisting of the union
1323 of PA_IN and ANTIC_IN during insert. */
1325 static inline pre_expr
1326 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1328 pre_expr result;
1330 result = bitmap_find_leader (set1, val);
1331 if (!result && set2)
1332 result = bitmap_find_leader (set2, val);
1333 return result;
1336 /* Get the tree type for our PRE expression e. */
1338 static tree
1339 get_expr_type (const pre_expr e)
1341 switch (e->kind)
1343 case NAME:
1344 return TREE_TYPE (PRE_EXPR_NAME (e));
1345 case CONSTANT:
1346 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1347 case REFERENCE:
1348 return PRE_EXPR_REFERENCE (e)->type;
1349 case NARY:
1350 return PRE_EXPR_NARY (e)->type;
1352 gcc_unreachable ();
1355 /* Get a representative SSA_NAME for a given expression.
1356 Since all of our sub-expressions are treated as values, we require
1357 them to be SSA_NAME's for simplicity.
1358 Prior versions of GVNPRE used to use "value handles" here, so that
1359 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1360 either case, the operands are really values (IE we do not expect
1361 them to be usable without finding leaders). */
1363 static tree
1364 get_representative_for (const pre_expr e)
1366 tree name;
1367 unsigned int value_id = get_expr_value_id (e);
1369 switch (e->kind)
1371 case NAME:
1372 return PRE_EXPR_NAME (e);
1373 case CONSTANT:
1374 return PRE_EXPR_CONSTANT (e);
1375 case NARY:
1376 case REFERENCE:
1378 /* Go through all of the expressions representing this value
1379 and pick out an SSA_NAME. */
1380 unsigned int i;
1381 bitmap_iterator bi;
1382 bitmap exprs = value_expressions[value_id];
1383 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1385 pre_expr rep = expression_for_id (i);
1386 if (rep->kind == NAME)
1387 return PRE_EXPR_NAME (rep);
1388 else if (rep->kind == CONSTANT)
1389 return PRE_EXPR_CONSTANT (rep);
1392 break;
1395 /* If we reached here we couldn't find an SSA_NAME. This can
1396 happen when we've discovered a value that has never appeared in
1397 the program as set to an SSA_NAME, as the result of phi translation.
1398 Create one here.
1399 ??? We should be able to re-use this when we insert the statement
1400 to compute it. */
1401 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1402 VN_INFO_GET (name)->value_id = value_id;
1403 VN_INFO (name)->valnum = name;
1404 /* ??? For now mark this SSA name for release by SCCVN. */
1405 VN_INFO (name)->needs_insertion = true;
1406 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1407 if (dump_file && (dump_flags & TDF_DETAILS))
1409 fprintf (dump_file, "Created SSA_NAME representative ");
1410 print_generic_expr (dump_file, name, 0);
1411 fprintf (dump_file, " for expression:");
1412 print_pre_expr (dump_file, e);
1413 fprintf (dump_file, " (%04d)\n", value_id);
1416 return name;
1421 static pre_expr
1422 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1423 basic_block pred, basic_block phiblock);
1425 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1426 the phis in PRED. Return NULL if we can't find a leader for each part
1427 of the translated expression. */
1429 static pre_expr
1430 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1431 basic_block pred, basic_block phiblock)
1433 switch (expr->kind)
1435 case NARY:
1437 unsigned int i;
1438 bool changed = false;
1439 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1440 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1441 sizeof_vn_nary_op (nary->length));
1442 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1444 for (i = 0; i < newnary->length; i++)
1446 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1447 continue;
1448 else
1450 pre_expr leader, result;
1451 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1452 leader = find_leader_in_sets (op_val_id, set1, set2);
1453 result = phi_translate (leader, set1, set2, pred, phiblock);
1454 if (result && result != leader)
1456 tree name = get_representative_for (result);
1457 if (!name)
1458 return NULL;
1459 newnary->op[i] = name;
1461 else if (!result)
1462 return NULL;
1464 changed |= newnary->op[i] != nary->op[i];
1467 if (changed)
1469 pre_expr constant;
1470 unsigned int new_val_id;
1472 tree result = vn_nary_op_lookup_pieces (newnary->length,
1473 newnary->opcode,
1474 newnary->type,
1475 &newnary->op[0],
1476 &nary);
1477 if (result && is_gimple_min_invariant (result))
1478 return get_or_alloc_expr_for_constant (result);
1480 expr = pre_expr_pool.allocate ();
1481 expr->kind = NARY;
1482 expr->id = 0;
1483 if (nary)
1485 PRE_EXPR_NARY (expr) = nary;
1486 constant = fully_constant_expression (expr);
1487 if (constant != expr)
1488 return constant;
1490 new_val_id = nary->value_id;
1491 get_or_alloc_expression_id (expr);
1493 else
1495 new_val_id = get_next_value_id ();
1496 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1497 nary = vn_nary_op_insert_pieces (newnary->length,
1498 newnary->opcode,
1499 newnary->type,
1500 &newnary->op[0],
1501 result, new_val_id);
1502 PRE_EXPR_NARY (expr) = nary;
1503 constant = fully_constant_expression (expr);
1504 if (constant != expr)
1505 return constant;
1506 get_or_alloc_expression_id (expr);
1508 add_to_value (new_val_id, expr);
1510 return expr;
1512 break;
1514 case REFERENCE:
1516 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1517 vec<vn_reference_op_s> operands = ref->operands;
1518 tree vuse = ref->vuse;
1519 tree newvuse = vuse;
1520 vec<vn_reference_op_s> newoperands = vNULL;
1521 bool changed = false, same_valid = true;
1522 unsigned int i, n;
1523 vn_reference_op_t operand;
1524 vn_reference_t newref;
1526 for (i = 0; operands.iterate (i, &operand); i++)
1528 pre_expr opresult;
1529 pre_expr leader;
1530 tree op[3];
1531 tree type = operand->type;
1532 vn_reference_op_s newop = *operand;
1533 op[0] = operand->op0;
1534 op[1] = operand->op1;
1535 op[2] = operand->op2;
1536 for (n = 0; n < 3; ++n)
1538 unsigned int op_val_id;
1539 if (!op[n])
1540 continue;
1541 if (TREE_CODE (op[n]) != SSA_NAME)
1543 /* We can't possibly insert these. */
1544 if (n != 0
1545 && !is_gimple_min_invariant (op[n]))
1546 break;
1547 continue;
1549 op_val_id = VN_INFO (op[n])->value_id;
1550 leader = find_leader_in_sets (op_val_id, set1, set2);
1551 if (!leader)
1552 break;
1553 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1554 if (!opresult)
1555 break;
1556 if (opresult != leader)
1558 tree name = get_representative_for (opresult);
1559 if (!name)
1560 break;
1561 changed |= name != op[n];
1562 op[n] = name;
1565 if (n != 3)
1567 newoperands.release ();
1568 return NULL;
1570 if (!changed)
1571 continue;
1572 if (!newoperands.exists ())
1573 newoperands = operands.copy ();
1574 /* We may have changed from an SSA_NAME to a constant */
1575 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1576 newop.opcode = TREE_CODE (op[0]);
1577 newop.type = type;
1578 newop.op0 = op[0];
1579 newop.op1 = op[1];
1580 newop.op2 = op[2];
1581 newoperands[i] = newop;
1583 gcc_checking_assert (i == operands.length ());
1585 if (vuse)
1587 newvuse = translate_vuse_through_block (newoperands.exists ()
1588 ? newoperands : operands,
1589 ref->set, ref->type,
1590 vuse, phiblock, pred,
1591 &same_valid);
1592 if (newvuse == NULL_TREE)
1594 newoperands.release ();
1595 return NULL;
1599 if (changed || newvuse != vuse)
1601 unsigned int new_val_id;
1602 pre_expr constant;
1604 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1605 ref->type,
1606 newoperands.exists ()
1607 ? newoperands : operands,
1608 &newref, VN_WALK);
1609 if (result)
1610 newoperands.release ();
1612 /* We can always insert constants, so if we have a partial
1613 redundant constant load of another type try to translate it
1614 to a constant of appropriate type. */
1615 if (result && is_gimple_min_invariant (result))
1617 tree tem = result;
1618 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1620 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1621 if (tem && !is_gimple_min_invariant (tem))
1622 tem = NULL_TREE;
1624 if (tem)
1625 return get_or_alloc_expr_for_constant (tem);
1628 /* If we'd have to convert things we would need to validate
1629 if we can insert the translated expression. So fail
1630 here for now - we cannot insert an alias with a different
1631 type in the VN tables either, as that would assert. */
1632 if (result
1633 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1634 return NULL;
1635 else if (!result && newref
1636 && !useless_type_conversion_p (ref->type, newref->type))
1638 newoperands.release ();
1639 return NULL;
1642 expr = pre_expr_pool.allocate ();
1643 expr->kind = REFERENCE;
1644 expr->id = 0;
1646 if (newref)
1648 PRE_EXPR_REFERENCE (expr) = newref;
1649 constant = fully_constant_expression (expr);
1650 if (constant != expr)
1651 return constant;
1653 new_val_id = newref->value_id;
1654 get_or_alloc_expression_id (expr);
1656 else
1658 if (changed || !same_valid)
1660 new_val_id = get_next_value_id ();
1661 value_expressions.safe_grow_cleared
1662 (get_max_value_id () + 1);
1664 else
1665 new_val_id = ref->value_id;
1666 if (!newoperands.exists ())
1667 newoperands = operands.copy ();
1668 newref = vn_reference_insert_pieces (newvuse, ref->set,
1669 ref->type,
1670 newoperands,
1671 result, new_val_id);
1672 newoperands = vNULL;
1673 PRE_EXPR_REFERENCE (expr) = newref;
1674 constant = fully_constant_expression (expr);
1675 if (constant != expr)
1676 return constant;
1677 get_or_alloc_expression_id (expr);
1679 add_to_value (new_val_id, expr);
1681 newoperands.release ();
1682 return expr;
1684 break;
1686 case NAME:
1688 tree name = PRE_EXPR_NAME (expr);
1689 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1690 /* If the SSA name is defined by a PHI node in this block,
1691 translate it. */
1692 if (gimple_code (def_stmt) == GIMPLE_PHI
1693 && gimple_bb (def_stmt) == phiblock)
1695 edge e = find_edge (pred, gimple_bb (def_stmt));
1696 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1698 /* Handle constant. */
1699 if (is_gimple_min_invariant (def))
1700 return get_or_alloc_expr_for_constant (def);
1702 return get_or_alloc_expr_for_name (def);
1704 /* Otherwise return it unchanged - it will get removed if its
1705 value is not available in PREDs AVAIL_OUT set of expressions
1706 by the subtraction of TMP_GEN. */
1707 return expr;
1710 default:
1711 gcc_unreachable ();
1715 /* Wrapper around phi_translate_1 providing caching functionality. */
1717 static pre_expr
1718 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1719 basic_block pred, basic_block phiblock)
1721 expr_pred_trans_t slot = NULL;
1722 pre_expr phitrans;
1724 if (!expr)
1725 return NULL;
1727 /* Constants contain no values that need translation. */
1728 if (expr->kind == CONSTANT)
1729 return expr;
1731 if (value_id_constant_p (get_expr_value_id (expr)))
1732 return expr;
1734 /* Don't add translations of NAMEs as those are cheap to translate. */
1735 if (expr->kind != NAME)
1737 if (phi_trans_add (&slot, expr, pred))
1738 return slot->v;
1739 /* Store NULL for the value we want to return in the case of
1740 recursing. */
1741 slot->v = NULL;
1744 /* Translate. */
1745 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1747 if (slot)
1749 if (phitrans)
1750 slot->v = phitrans;
1751 else
1752 /* Remove failed translations again, they cause insert
1753 iteration to not pick up new opportunities reliably. */
1754 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1757 return phitrans;
1761 /* For each expression in SET, translate the values through phi nodes
1762 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1763 expressions in DEST. */
1765 static void
1766 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1767 basic_block phiblock)
1769 vec<pre_expr> exprs;
1770 pre_expr expr;
1771 int i;
1773 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1775 bitmap_set_copy (dest, set);
1776 return;
1779 exprs = sorted_array_from_bitmap_set (set);
1780 FOR_EACH_VEC_ELT (exprs, i, expr)
1782 pre_expr translated;
1783 translated = phi_translate (expr, set, NULL, pred, phiblock);
1784 if (!translated)
1785 continue;
1787 /* We might end up with multiple expressions from SET being
1788 translated to the same value. In this case we do not want
1789 to retain the NARY or REFERENCE expression but prefer a NAME
1790 which would be the leader. */
1791 if (translated->kind == NAME)
1792 bitmap_value_replace_in_set (dest, translated);
1793 else
1794 bitmap_value_insert_into_set (dest, translated);
1796 exprs.release ();
1799 /* Find the leader for a value (i.e., the name representing that
1800 value) in a given set, and return it. Return NULL if no leader
1801 is found. */
1803 static pre_expr
1804 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1806 if (value_id_constant_p (val))
1808 unsigned int i;
1809 bitmap_iterator bi;
1810 bitmap exprset = value_expressions[val];
1812 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1814 pre_expr expr = expression_for_id (i);
1815 if (expr->kind == CONSTANT)
1816 return expr;
1819 if (bitmap_set_contains_value (set, val))
1821 /* Rather than walk the entire bitmap of expressions, and see
1822 whether any of them has the value we are looking for, we look
1823 at the reverse mapping, which tells us the set of expressions
1824 that have a given value (IE value->expressions with that
1825 value) and see if any of those expressions are in our set.
1826 The number of expressions per value is usually significantly
1827 less than the number of expressions in the set. In fact, for
1828 large testcases, doing it this way is roughly 5-10x faster
1829 than walking the bitmap.
1830 If this is somehow a significant lose for some cases, we can
1831 choose which set to walk based on which set is smaller. */
1832 unsigned int i;
1833 bitmap_iterator bi;
1834 bitmap exprset = value_expressions[val];
1836 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1837 return expression_for_id (i);
1839 return NULL;
1842 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1843 BLOCK by seeing if it is not killed in the block. Note that we are
1844 only determining whether there is a store that kills it. Because
1845 of the order in which clean iterates over values, we are guaranteed
1846 that altered operands will have caused us to be eliminated from the
1847 ANTIC_IN set already. */
1849 static bool
1850 value_dies_in_block_x (pre_expr expr, basic_block block)
1852 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1853 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1854 gimple *def;
1855 gimple_stmt_iterator gsi;
1856 unsigned id = get_expression_id (expr);
1857 bool res = false;
1858 ao_ref ref;
1860 if (!vuse)
1861 return false;
1863 /* Lookup a previously calculated result. */
1864 if (EXPR_DIES (block)
1865 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1866 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1868 /* A memory expression {e, VUSE} dies in the block if there is a
1869 statement that may clobber e. If, starting statement walk from the
1870 top of the basic block, a statement uses VUSE there can be no kill
1871 inbetween that use and the original statement that loaded {e, VUSE},
1872 so we can stop walking. */
1873 ref.base = NULL_TREE;
1874 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1876 tree def_vuse, def_vdef;
1877 def = gsi_stmt (gsi);
1878 def_vuse = gimple_vuse (def);
1879 def_vdef = gimple_vdef (def);
1881 /* Not a memory statement. */
1882 if (!def_vuse)
1883 continue;
1885 /* Not a may-def. */
1886 if (!def_vdef)
1888 /* A load with the same VUSE, we're done. */
1889 if (def_vuse == vuse)
1890 break;
1892 continue;
1895 /* Init ref only if we really need it. */
1896 if (ref.base == NULL_TREE
1897 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1898 refx->operands))
1900 res = true;
1901 break;
1903 /* If the statement may clobber expr, it dies. */
1904 if (stmt_may_clobber_ref_p_1 (def, &ref))
1906 res = true;
1907 break;
1911 /* Remember the result. */
1912 if (!EXPR_DIES (block))
1913 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1914 bitmap_set_bit (EXPR_DIES (block), id * 2);
1915 if (res)
1916 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1918 return res;
1922 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1923 contains its value-id. */
1925 static bool
1926 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1928 if (op && TREE_CODE (op) == SSA_NAME)
1930 unsigned int value_id = VN_INFO (op)->value_id;
1931 if (!(bitmap_set_contains_value (set1, value_id)
1932 || (set2 && bitmap_set_contains_value (set2, value_id))))
1933 return false;
1935 return true;
1938 /* Determine if the expression EXPR is valid in SET1 U SET2.
1939 ONLY SET2 CAN BE NULL.
1940 This means that we have a leader for each part of the expression
1941 (if it consists of values), or the expression is an SSA_NAME.
1942 For loads/calls, we also see if the vuse is killed in this block. */
1944 static bool
1945 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1947 switch (expr->kind)
1949 case NAME:
1950 /* By construction all NAMEs are available. Non-available
1951 NAMEs are removed by subtracting TMP_GEN from the sets. */
1952 return true;
1953 case NARY:
1955 unsigned int i;
1956 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1957 for (i = 0; i < nary->length; i++)
1958 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1959 return false;
1960 return true;
1962 break;
1963 case REFERENCE:
1965 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1966 vn_reference_op_t vro;
1967 unsigned int i;
1969 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1971 if (!op_valid_in_sets (set1, set2, vro->op0)
1972 || !op_valid_in_sets (set1, set2, vro->op1)
1973 || !op_valid_in_sets (set1, set2, vro->op2))
1974 return false;
1976 return true;
1978 default:
1979 gcc_unreachable ();
1983 /* Clean the set of expressions that are no longer valid in SET1 or
1984 SET2. This means expressions that are made up of values we have no
1985 leaders for in SET1 or SET2. This version is used for partial
1986 anticipation, which means it is not valid in either ANTIC_IN or
1987 PA_IN. */
1989 static void
1990 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
1992 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1993 pre_expr expr;
1994 int i;
1996 FOR_EACH_VEC_ELT (exprs, i, expr)
1998 if (!valid_in_sets (set1, set2, expr))
1999 bitmap_remove_from_set (set1, expr);
2001 exprs.release ();
2004 /* Clean the set of expressions that are no longer valid in SET. This
2005 means expressions that are made up of values we have no leaders for
2006 in SET. */
2008 static void
2009 clean (bitmap_set_t set)
2011 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2012 pre_expr expr;
2013 int i;
2015 FOR_EACH_VEC_ELT (exprs, i, expr)
2017 if (!valid_in_sets (set, NULL, expr))
2018 bitmap_remove_from_set (set, expr);
2020 exprs.release ();
2023 /* Clean the set of expressions that are no longer valid in SET because
2024 they are clobbered in BLOCK or because they trap and may not be executed. */
2026 static void
2027 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2029 bitmap_iterator bi;
2030 unsigned i;
2031 pre_expr to_remove = NULL;
2033 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2035 /* Remove queued expr. */
2036 if (to_remove)
2038 bitmap_remove_from_set (set, to_remove);
2039 to_remove = NULL;
2042 pre_expr expr = expression_for_id (i);
2043 if (expr->kind == REFERENCE)
2045 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2046 if (ref->vuse)
2048 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2049 if (!gimple_nop_p (def_stmt)
2050 && ((gimple_bb (def_stmt) != block
2051 && !dominated_by_p (CDI_DOMINATORS,
2052 block, gimple_bb (def_stmt)))
2053 || (gimple_bb (def_stmt) == block
2054 && value_dies_in_block_x (expr, block))))
2055 to_remove = expr;
2058 else if (expr->kind == NARY)
2060 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2061 /* If the NARY may trap make sure the block does not contain
2062 a possible exit point.
2063 ??? This is overly conservative if we translate AVAIL_OUT
2064 as the available expression might be after the exit point. */
2065 if (BB_MAY_NOTRETURN (block)
2066 && vn_nary_may_trap (nary))
2067 to_remove = expr;
2071 /* Remove queued expr. */
2072 if (to_remove)
2073 bitmap_remove_from_set (set, to_remove);
2076 static sbitmap has_abnormal_preds;
2078 /* Compute the ANTIC set for BLOCK.
2080 If succs(BLOCK) > 1 then
2081 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2082 else if succs(BLOCK) == 1 then
2083 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2085 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2088 static bool
2089 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2091 bool changed = false;
2092 bitmap_set_t S, old, ANTIC_OUT;
2093 bitmap_iterator bi;
2094 unsigned int bii;
2095 edge e;
2096 edge_iterator ei;
2097 bool was_visited = BB_VISITED (block);
2099 old = ANTIC_OUT = S = NULL;
2100 BB_VISITED (block) = 1;
2102 /* If any edges from predecessors are abnormal, antic_in is empty,
2103 so do nothing. */
2104 if (block_has_abnormal_pred_edge)
2105 goto maybe_dump_sets;
2107 old = ANTIC_IN (block);
2108 ANTIC_OUT = bitmap_set_new ();
2110 /* If the block has no successors, ANTIC_OUT is empty. */
2111 if (EDGE_COUNT (block->succs) == 0)
2113 /* If we have one successor, we could have some phi nodes to
2114 translate through. */
2115 else if (single_succ_p (block))
2117 basic_block succ_bb = single_succ (block);
2118 gcc_assert (BB_VISITED (succ_bb));
2119 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2121 /* If we have multiple successors, we take the intersection of all of
2122 them. Note that in the case of loop exit phi nodes, we may have
2123 phis to translate through. */
2124 else
2126 size_t i;
2127 basic_block bprime, first = NULL;
2129 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2130 FOR_EACH_EDGE (e, ei, block->succs)
2132 if (!first
2133 && BB_VISITED (e->dest))
2134 first = e->dest;
2135 else if (BB_VISITED (e->dest))
2136 worklist.quick_push (e->dest);
2137 else
2139 /* Unvisited successors get their ANTIC_IN replaced by the
2140 maximal set to arrive at a maximum ANTIC_IN solution.
2141 We can ignore them in the intersection operation and thus
2142 need not explicitely represent that maximum solution. */
2143 if (dump_file && (dump_flags & TDF_DETAILS))
2144 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2145 e->src->index, e->dest->index);
2149 /* Of multiple successors we have to have visited one already
2150 which is guaranteed by iteration order. */
2151 gcc_assert (first != NULL);
2153 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2155 FOR_EACH_VEC_ELT (worklist, i, bprime)
2157 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2159 bitmap_set_t tmp = bitmap_set_new ();
2160 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2161 bitmap_set_and (ANTIC_OUT, tmp);
2162 bitmap_set_free (tmp);
2164 else
2165 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2169 /* Prune expressions that are clobbered in block and thus become
2170 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2171 prune_clobbered_mems (ANTIC_OUT, block);
2173 /* Generate ANTIC_OUT - TMP_GEN. */
2174 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2176 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2177 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2178 TMP_GEN (block));
2180 /* Then union in the ANTIC_OUT - TMP_GEN values,
2181 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2182 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2183 bitmap_value_insert_into_set (ANTIC_IN (block),
2184 expression_for_id (bii));
2186 clean (ANTIC_IN (block));
2188 if (!was_visited || !bitmap_set_equal (old, ANTIC_IN (block)))
2189 changed = true;
2191 maybe_dump_sets:
2192 if (dump_file && (dump_flags & TDF_DETAILS))
2194 if (ANTIC_OUT)
2195 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2197 if (changed)
2198 fprintf (dump_file, "[changed] ");
2199 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2200 block->index);
2202 if (S)
2203 print_bitmap_set (dump_file, S, "S", block->index);
2205 if (old)
2206 bitmap_set_free (old);
2207 if (S)
2208 bitmap_set_free (S);
2209 if (ANTIC_OUT)
2210 bitmap_set_free (ANTIC_OUT);
2211 return changed;
2214 /* Compute PARTIAL_ANTIC for BLOCK.
2216 If succs(BLOCK) > 1 then
2217 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2218 in ANTIC_OUT for all succ(BLOCK)
2219 else if succs(BLOCK) == 1 then
2220 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2222 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2223 - ANTIC_IN[BLOCK])
2226 static bool
2227 compute_partial_antic_aux (basic_block block,
2228 bool block_has_abnormal_pred_edge)
2230 bool changed = false;
2231 bitmap_set_t old_PA_IN;
2232 bitmap_set_t PA_OUT;
2233 edge e;
2234 edge_iterator ei;
2235 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2237 old_PA_IN = PA_OUT = NULL;
2239 /* If any edges from predecessors are abnormal, antic_in is empty,
2240 so do nothing. */
2241 if (block_has_abnormal_pred_edge)
2242 goto maybe_dump_sets;
2244 /* If there are too many partially anticipatable values in the
2245 block, phi_translate_set can take an exponential time: stop
2246 before the translation starts. */
2247 if (max_pa
2248 && single_succ_p (block)
2249 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2250 goto maybe_dump_sets;
2252 old_PA_IN = PA_IN (block);
2253 PA_OUT = bitmap_set_new ();
2255 /* If the block has no successors, ANTIC_OUT is empty. */
2256 if (EDGE_COUNT (block->succs) == 0)
2258 /* If we have one successor, we could have some phi nodes to
2259 translate through. Note that we can't phi translate across DFS
2260 back edges in partial antic, because it uses a union operation on
2261 the successors. For recurrences like IV's, we will end up
2262 generating a new value in the set on each go around (i + 3 (VH.1)
2263 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2264 else if (single_succ_p (block))
2266 basic_block succ = single_succ (block);
2267 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2268 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2270 /* If we have multiple successors, we take the union of all of
2271 them. */
2272 else
2274 size_t i;
2275 basic_block bprime;
2277 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2278 FOR_EACH_EDGE (e, ei, block->succs)
2280 if (e->flags & EDGE_DFS_BACK)
2281 continue;
2282 worklist.quick_push (e->dest);
2284 if (worklist.length () > 0)
2286 FOR_EACH_VEC_ELT (worklist, i, bprime)
2288 unsigned int i;
2289 bitmap_iterator bi;
2291 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2292 bitmap_value_insert_into_set (PA_OUT,
2293 expression_for_id (i));
2294 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2296 bitmap_set_t pa_in = bitmap_set_new ();
2297 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2298 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2299 bitmap_value_insert_into_set (PA_OUT,
2300 expression_for_id (i));
2301 bitmap_set_free (pa_in);
2303 else
2304 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2305 bitmap_value_insert_into_set (PA_OUT,
2306 expression_for_id (i));
2311 /* Prune expressions that are clobbered in block and thus become
2312 invalid if translated from PA_OUT to PA_IN. */
2313 prune_clobbered_mems (PA_OUT, block);
2315 /* PA_IN starts with PA_OUT - TMP_GEN.
2316 Then we subtract things from ANTIC_IN. */
2317 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2319 /* For partial antic, we want to put back in the phi results, since
2320 we will properly avoid making them partially antic over backedges. */
2321 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2322 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2324 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2325 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2327 dependent_clean (PA_IN (block), ANTIC_IN (block));
2329 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2330 changed = true;
2332 maybe_dump_sets:
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2335 if (PA_OUT)
2336 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2338 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2340 if (old_PA_IN)
2341 bitmap_set_free (old_PA_IN);
2342 if (PA_OUT)
2343 bitmap_set_free (PA_OUT);
2344 return changed;
2347 /* Compute ANTIC and partial ANTIC sets. */
2349 static void
2350 compute_antic (void)
2352 bool changed = true;
2353 int num_iterations = 0;
2354 basic_block block;
2355 int i;
2356 edge_iterator ei;
2357 edge e;
2359 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2360 We pre-build the map of blocks with incoming abnormal edges here. */
2361 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2362 bitmap_clear (has_abnormal_preds);
2364 FOR_ALL_BB_FN (block, cfun)
2366 BB_VISITED (block) = 0;
2368 FOR_EACH_EDGE (e, ei, block->preds)
2369 if (e->flags & EDGE_ABNORMAL)
2371 bitmap_set_bit (has_abnormal_preds, block->index);
2373 /* We also anticipate nothing. */
2374 BB_VISITED (block) = 1;
2375 break;
2378 /* While we are here, give empty ANTIC_IN sets to each block. */
2379 ANTIC_IN (block) = bitmap_set_new ();
2380 PA_IN (block) = bitmap_set_new ();
2383 /* At the exit block we anticipate nothing. */
2384 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2386 sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2387 bitmap_ones (worklist);
2388 while (changed)
2390 if (dump_file && (dump_flags & TDF_DETAILS))
2391 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2392 /* ??? We need to clear our PHI translation cache here as the
2393 ANTIC sets shrink and we restrict valid translations to
2394 those having operands with leaders in ANTIC. Same below
2395 for PA ANTIC computation. */
2396 num_iterations++;
2397 changed = false;
2398 for (i = postorder_num - 1; i >= 0; i--)
2400 if (bitmap_bit_p (worklist, postorder[i]))
2402 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2403 bitmap_clear_bit (worklist, block->index);
2404 if (compute_antic_aux (block,
2405 bitmap_bit_p (has_abnormal_preds,
2406 block->index)))
2408 FOR_EACH_EDGE (e, ei, block->preds)
2409 bitmap_set_bit (worklist, e->src->index);
2410 changed = true;
2414 /* Theoretically possible, but *highly* unlikely. */
2415 gcc_checking_assert (num_iterations < 500);
2418 statistics_histogram_event (cfun, "compute_antic iterations",
2419 num_iterations);
2421 if (do_partial_partial)
2423 bitmap_ones (worklist);
2424 num_iterations = 0;
2425 changed = true;
2426 while (changed)
2428 if (dump_file && (dump_flags & TDF_DETAILS))
2429 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2430 num_iterations++;
2431 changed = false;
2432 for (i = postorder_num - 1 ; i >= 0; i--)
2434 if (bitmap_bit_p (worklist, postorder[i]))
2436 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2437 bitmap_clear_bit (worklist, block->index);
2438 if (compute_partial_antic_aux (block,
2439 bitmap_bit_p (has_abnormal_preds,
2440 block->index)))
2442 FOR_EACH_EDGE (e, ei, block->preds)
2443 bitmap_set_bit (worklist, e->src->index);
2444 changed = true;
2448 /* Theoretically possible, but *highly* unlikely. */
2449 gcc_checking_assert (num_iterations < 500);
2451 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2452 num_iterations);
2454 sbitmap_free (has_abnormal_preds);
2455 sbitmap_free (worklist);
2459 /* Inserted expressions are placed onto this worklist, which is used
2460 for performing quick dead code elimination of insertions we made
2461 that didn't turn out to be necessary. */
2462 static bitmap inserted_exprs;
2464 /* The actual worker for create_component_ref_by_pieces. */
2466 static tree
2467 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2468 unsigned int *operand, gimple_seq *stmts)
2470 vn_reference_op_t currop = &ref->operands[*operand];
2471 tree genop;
2472 ++*operand;
2473 switch (currop->opcode)
2475 case CALL_EXPR:
2476 gcc_unreachable ();
2478 case MEM_REF:
2480 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2481 stmts);
2482 if (!baseop)
2483 return NULL_TREE;
2484 tree offset = currop->op0;
2485 if (TREE_CODE (baseop) == ADDR_EXPR
2486 && handled_component_p (TREE_OPERAND (baseop, 0)))
2488 HOST_WIDE_INT off;
2489 tree base;
2490 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2491 &off);
2492 gcc_assert (base);
2493 offset = int_const_binop (PLUS_EXPR, offset,
2494 build_int_cst (TREE_TYPE (offset),
2495 off));
2496 baseop = build_fold_addr_expr (base);
2498 genop = build2 (MEM_REF, currop->type, baseop, offset);
2499 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2500 MR_DEPENDENCE_BASE (genop) = currop->base;
2501 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2502 return genop;
2505 case TARGET_MEM_REF:
2507 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2508 vn_reference_op_t nextop = &ref->operands[++*operand];
2509 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2510 stmts);
2511 if (!baseop)
2512 return NULL_TREE;
2513 if (currop->op0)
2515 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2516 if (!genop0)
2517 return NULL_TREE;
2519 if (nextop->op0)
2521 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2522 if (!genop1)
2523 return NULL_TREE;
2525 genop = build5 (TARGET_MEM_REF, currop->type,
2526 baseop, currop->op2, genop0, currop->op1, genop1);
2528 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2529 MR_DEPENDENCE_BASE (genop) = currop->base;
2530 return genop;
2533 case ADDR_EXPR:
2534 if (currop->op0)
2536 gcc_assert (is_gimple_min_invariant (currop->op0));
2537 return currop->op0;
2539 /* Fallthrough. */
2540 case REALPART_EXPR:
2541 case IMAGPART_EXPR:
2542 case VIEW_CONVERT_EXPR:
2544 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2545 stmts);
2546 if (!genop0)
2547 return NULL_TREE;
2548 return fold_build1 (currop->opcode, currop->type, genop0);
2551 case WITH_SIZE_EXPR:
2553 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2554 stmts);
2555 if (!genop0)
2556 return NULL_TREE;
2557 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2558 if (!genop1)
2559 return NULL_TREE;
2560 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2563 case BIT_FIELD_REF:
2565 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2566 stmts);
2567 if (!genop0)
2568 return NULL_TREE;
2569 tree op1 = currop->op0;
2570 tree op2 = currop->op1;
2571 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2572 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2573 return fold (t);
2576 /* For array ref vn_reference_op's, operand 1 of the array ref
2577 is op0 of the reference op and operand 3 of the array ref is
2578 op1. */
2579 case ARRAY_RANGE_REF:
2580 case ARRAY_REF:
2582 tree genop0;
2583 tree genop1 = currop->op0;
2584 tree genop2 = currop->op1;
2585 tree genop3 = currop->op2;
2586 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2587 stmts);
2588 if (!genop0)
2589 return NULL_TREE;
2590 genop1 = find_or_generate_expression (block, genop1, stmts);
2591 if (!genop1)
2592 return NULL_TREE;
2593 if (genop2)
2595 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2596 /* Drop zero minimum index if redundant. */
2597 if (integer_zerop (genop2)
2598 && (!domain_type
2599 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2600 genop2 = NULL_TREE;
2601 else
2603 genop2 = find_or_generate_expression (block, genop2, stmts);
2604 if (!genop2)
2605 return NULL_TREE;
2608 if (genop3)
2610 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2611 /* We can't always put a size in units of the element alignment
2612 here as the element alignment may be not visible. See
2613 PR43783. Simply drop the element size for constant
2614 sizes. */
2615 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2616 genop3 = NULL_TREE;
2617 else
2619 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2620 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2621 genop3 = find_or_generate_expression (block, genop3, stmts);
2622 if (!genop3)
2623 return NULL_TREE;
2626 return build4 (currop->opcode, currop->type, genop0, genop1,
2627 genop2, genop3);
2629 case COMPONENT_REF:
2631 tree op0;
2632 tree op1;
2633 tree genop2 = currop->op1;
2634 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2635 if (!op0)
2636 return NULL_TREE;
2637 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2638 op1 = currop->op0;
2639 if (genop2)
2641 genop2 = find_or_generate_expression (block, genop2, stmts);
2642 if (!genop2)
2643 return NULL_TREE;
2645 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2648 case SSA_NAME:
2650 genop = find_or_generate_expression (block, currop->op0, stmts);
2651 return genop;
2653 case STRING_CST:
2654 case INTEGER_CST:
2655 case COMPLEX_CST:
2656 case VECTOR_CST:
2657 case REAL_CST:
2658 case CONSTRUCTOR:
2659 case VAR_DECL:
2660 case PARM_DECL:
2661 case CONST_DECL:
2662 case RESULT_DECL:
2663 case FUNCTION_DECL:
2664 return currop->op0;
2666 default:
2667 gcc_unreachable ();
2671 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2672 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
2673 trying to rename aggregates into ssa form directly, which is a no no.
2675 Thus, this routine doesn't create temporaries, it just builds a
2676 single access expression for the array, calling
2677 find_or_generate_expression to build the innermost pieces.
2679 This function is a subroutine of create_expression_by_pieces, and
2680 should not be called on it's own unless you really know what you
2681 are doing. */
2683 static tree
2684 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2685 gimple_seq *stmts)
2687 unsigned int op = 0;
2688 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2691 /* Find a simple leader for an expression, or generate one using
2692 create_expression_by_pieces from a NARY expression for the value.
2693 BLOCK is the basic_block we are looking for leaders in.
2694 OP is the tree expression to find a leader for or generate.
2695 Returns the leader or NULL_TREE on failure. */
2697 static tree
2698 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2700 pre_expr expr = get_or_alloc_expr_for (op);
2701 unsigned int lookfor = get_expr_value_id (expr);
2702 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2703 if (leader)
2705 if (leader->kind == NAME)
2706 return PRE_EXPR_NAME (leader);
2707 else if (leader->kind == CONSTANT)
2708 return PRE_EXPR_CONSTANT (leader);
2710 /* Defer. */
2711 return NULL_TREE;
2714 /* It must be a complex expression, so generate it recursively. Note
2715 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2716 where the insert algorithm fails to insert a required expression. */
2717 bitmap exprset = value_expressions[lookfor];
2718 bitmap_iterator bi;
2719 unsigned int i;
2720 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2722 pre_expr temp = expression_for_id (i);
2723 /* We cannot insert random REFERENCE expressions at arbitrary
2724 places. We can insert NARYs which eventually re-materializes
2725 its operand values. */
2726 if (temp->kind == NARY)
2727 return create_expression_by_pieces (block, temp, stmts,
2728 get_expr_type (expr));
2731 /* Defer. */
2732 return NULL_TREE;
2735 #define NECESSARY GF_PLF_1
2737 /* Create an expression in pieces, so that we can handle very complex
2738 expressions that may be ANTIC, but not necessary GIMPLE.
2739 BLOCK is the basic block the expression will be inserted into,
2740 EXPR is the expression to insert (in value form)
2741 STMTS is a statement list to append the necessary insertions into.
2743 This function will die if we hit some value that shouldn't be
2744 ANTIC but is (IE there is no leader for it, or its components).
2745 The function returns NULL_TREE in case a different antic expression
2746 has to be inserted first.
2747 This function may also generate expressions that are themselves
2748 partially or fully redundant. Those that are will be either made
2749 fully redundant during the next iteration of insert (for partially
2750 redundant ones), or eliminated by eliminate (for fully redundant
2751 ones). */
2753 static tree
2754 create_expression_by_pieces (basic_block block, pre_expr expr,
2755 gimple_seq *stmts, tree type)
2757 tree name;
2758 tree folded;
2759 gimple_seq forced_stmts = NULL;
2760 unsigned int value_id;
2761 gimple_stmt_iterator gsi;
2762 tree exprtype = type ? type : get_expr_type (expr);
2763 pre_expr nameexpr;
2764 gassign *newstmt;
2766 switch (expr->kind)
2768 /* We may hit the NAME/CONSTANT case if we have to convert types
2769 that value numbering saw through. */
2770 case NAME:
2771 folded = PRE_EXPR_NAME (expr);
2772 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2773 return folded;
2774 break;
2775 case CONSTANT:
2777 folded = PRE_EXPR_CONSTANT (expr);
2778 tree tem = fold_convert (exprtype, folded);
2779 if (is_gimple_min_invariant (tem))
2780 return tem;
2781 break;
2783 case REFERENCE:
2784 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2786 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2787 unsigned int operand = 1;
2788 vn_reference_op_t currop = &ref->operands[0];
2789 tree sc = NULL_TREE;
2790 tree fn;
2791 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2792 fn = currop->op0;
2793 else
2794 fn = find_or_generate_expression (block, currop->op0, stmts);
2795 if (!fn)
2796 return NULL_TREE;
2797 if (currop->op1)
2799 sc = find_or_generate_expression (block, currop->op1, stmts);
2800 if (!sc)
2801 return NULL_TREE;
2803 auto_vec<tree> args (ref->operands.length () - 1);
2804 while (operand < ref->operands.length ())
2806 tree arg = create_component_ref_by_pieces_1 (block, ref,
2807 &operand, stmts);
2808 if (!arg)
2809 return NULL_TREE;
2810 args.quick_push (arg);
2812 gcall *call
2813 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2814 ? build_fold_addr_expr (fn) : fn), args);
2815 gimple_call_set_with_bounds (call, currop->with_bounds);
2816 if (sc)
2817 gimple_call_set_chain (call, sc);
2818 tree forcedname = make_ssa_name (currop->type);
2819 gimple_call_set_lhs (call, forcedname);
2820 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2821 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2822 folded = forcedname;
2824 else
2826 folded = create_component_ref_by_pieces (block,
2827 PRE_EXPR_REFERENCE (expr),
2828 stmts);
2829 if (!folded)
2830 return NULL_TREE;
2831 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2832 newstmt = gimple_build_assign (name, folded);
2833 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2834 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2835 folded = name;
2837 break;
2838 case NARY:
2840 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2841 tree *genop = XALLOCAVEC (tree, nary->length);
2842 unsigned i;
2843 for (i = 0; i < nary->length; ++i)
2845 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2846 if (!genop[i])
2847 return NULL_TREE;
2848 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2849 may have conversions stripped. */
2850 if (nary->opcode == POINTER_PLUS_EXPR)
2852 if (i == 0)
2853 genop[i] = gimple_convert (&forced_stmts,
2854 nary->type, genop[i]);
2855 else if (i == 1)
2856 genop[i] = gimple_convert (&forced_stmts,
2857 sizetype, genop[i]);
2859 else
2860 genop[i] = gimple_convert (&forced_stmts,
2861 TREE_TYPE (nary->op[i]), genop[i]);
2863 if (nary->opcode == CONSTRUCTOR)
2865 vec<constructor_elt, va_gc> *elts = NULL;
2866 for (i = 0; i < nary->length; ++i)
2867 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2868 folded = build_constructor (nary->type, elts);
2869 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2870 newstmt = gimple_build_assign (name, folded);
2871 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2872 folded = name;
2874 else
2876 switch (nary->length)
2878 case 1:
2879 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2880 genop[0]);
2881 break;
2882 case 2:
2883 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2884 genop[0], genop[1]);
2885 break;
2886 case 3:
2887 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2888 genop[0], genop[1], genop[2]);
2889 break;
2890 default:
2891 gcc_unreachable ();
2895 break;
2896 default:
2897 gcc_unreachable ();
2900 folded = gimple_convert (&forced_stmts, exprtype, folded);
2902 /* If there is nothing to insert, return the simplified result. */
2903 if (gimple_seq_empty_p (forced_stmts))
2904 return folded;
2905 /* If we simplified to a constant return it and discard eventually
2906 built stmts. */
2907 if (is_gimple_min_invariant (folded))
2909 gimple_seq_discard (forced_stmts);
2910 return folded;
2912 /* Likewise if we simplified to sth not queued for insertion. */
2913 bool found = false;
2914 gsi = gsi_last (forced_stmts);
2915 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2917 gimple *stmt = gsi_stmt (gsi);
2918 tree forcedname = gimple_get_lhs (stmt);
2919 if (forcedname == folded)
2921 found = true;
2922 break;
2925 if (! found)
2927 gimple_seq_discard (forced_stmts);
2928 return folded;
2930 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2932 /* If we have any intermediate expressions to the value sets, add them
2933 to the value sets and chain them in the instruction stream. */
2934 if (forced_stmts)
2936 gsi = gsi_start (forced_stmts);
2937 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2939 gimple *stmt = gsi_stmt (gsi);
2940 tree forcedname = gimple_get_lhs (stmt);
2941 pre_expr nameexpr;
2943 if (forcedname != folded)
2945 VN_INFO_GET (forcedname)->valnum = forcedname;
2946 VN_INFO (forcedname)->value_id = get_next_value_id ();
2947 nameexpr = get_or_alloc_expr_for_name (forcedname);
2948 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2949 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2950 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2953 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2954 gimple_set_plf (stmt, NECESSARY, false);
2956 gimple_seq_add_seq (stmts, forced_stmts);
2959 name = folded;
2961 /* Fold the last statement. */
2962 gsi = gsi_last (*stmts);
2963 if (fold_stmt_inplace (&gsi))
2964 update_stmt (gsi_stmt (gsi));
2966 /* Add a value number to the temporary.
2967 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2968 we are creating the expression by pieces, and this particular piece of
2969 the expression may have been represented. There is no harm in replacing
2970 here. */
2971 value_id = get_expr_value_id (expr);
2972 VN_INFO_GET (name)->value_id = value_id;
2973 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2974 if (VN_INFO (name)->valnum == NULL_TREE)
2975 VN_INFO (name)->valnum = name;
2976 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2977 nameexpr = get_or_alloc_expr_for_name (name);
2978 add_to_value (value_id, nameexpr);
2979 if (NEW_SETS (block))
2980 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2981 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2983 pre_stats.insertions++;
2984 if (dump_file && (dump_flags & TDF_DETAILS))
2986 fprintf (dump_file, "Inserted ");
2987 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0, 0);
2988 fprintf (dump_file, " in predecessor %d (%04d)\n",
2989 block->index, value_id);
2992 return name;
2996 /* Insert the to-be-made-available values of expression EXPRNUM for each
2997 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2998 merge the result with a phi node, given the same value number as
2999 NODE. Return true if we have inserted new stuff. */
3001 static bool
3002 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3003 vec<pre_expr> avail)
3005 pre_expr expr = expression_for_id (exprnum);
3006 pre_expr newphi;
3007 unsigned int val = get_expr_value_id (expr);
3008 edge pred;
3009 bool insertions = false;
3010 bool nophi = false;
3011 basic_block bprime;
3012 pre_expr eprime;
3013 edge_iterator ei;
3014 tree type = get_expr_type (expr);
3015 tree temp;
3016 gphi *phi;
3018 /* Make sure we aren't creating an induction variable. */
3019 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3021 bool firstinsideloop = false;
3022 bool secondinsideloop = false;
3023 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3024 EDGE_PRED (block, 0)->src);
3025 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3026 EDGE_PRED (block, 1)->src);
3027 /* Induction variables only have one edge inside the loop. */
3028 if ((firstinsideloop ^ secondinsideloop)
3029 && expr->kind != REFERENCE)
3031 if (dump_file && (dump_flags & TDF_DETAILS))
3032 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3033 nophi = true;
3037 /* Make the necessary insertions. */
3038 FOR_EACH_EDGE (pred, ei, block->preds)
3040 gimple_seq stmts = NULL;
3041 tree builtexpr;
3042 bprime = pred->src;
3043 eprime = avail[pred->dest_idx];
3044 builtexpr = create_expression_by_pieces (bprime, eprime,
3045 &stmts, type);
3046 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3047 if (!gimple_seq_empty_p (stmts))
3049 gsi_insert_seq_on_edge (pred, stmts);
3050 insertions = true;
3052 if (!builtexpr)
3054 /* We cannot insert a PHI node if we failed to insert
3055 on one edge. */
3056 nophi = true;
3057 continue;
3059 if (is_gimple_min_invariant (builtexpr))
3060 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3061 else
3062 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3064 /* If we didn't want a phi node, and we made insertions, we still have
3065 inserted new stuff, and thus return true. If we didn't want a phi node,
3066 and didn't make insertions, we haven't added anything new, so return
3067 false. */
3068 if (nophi && insertions)
3069 return true;
3070 else if (nophi && !insertions)
3071 return false;
3073 /* Now build a phi for the new variable. */
3074 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3075 phi = create_phi_node (temp, block);
3077 gimple_set_plf (phi, NECESSARY, false);
3078 VN_INFO_GET (temp)->value_id = val;
3079 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3080 if (VN_INFO (temp)->valnum == NULL_TREE)
3081 VN_INFO (temp)->valnum = temp;
3082 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3083 FOR_EACH_EDGE (pred, ei, block->preds)
3085 pre_expr ae = avail[pred->dest_idx];
3086 gcc_assert (get_expr_type (ae) == type
3087 || useless_type_conversion_p (type, get_expr_type (ae)));
3088 if (ae->kind == CONSTANT)
3089 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3090 pred, UNKNOWN_LOCATION);
3091 else
3092 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3095 newphi = get_or_alloc_expr_for_name (temp);
3096 add_to_value (val, newphi);
3098 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3099 this insertion, since we test for the existence of this value in PHI_GEN
3100 before proceeding with the partial redundancy checks in insert_aux.
3102 The value may exist in AVAIL_OUT, in particular, it could be represented
3103 by the expression we are trying to eliminate, in which case we want the
3104 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3105 inserted there.
3107 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3108 this block, because if it did, it would have existed in our dominator's
3109 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3112 bitmap_insert_into_set (PHI_GEN (block), newphi);
3113 bitmap_value_replace_in_set (AVAIL_OUT (block),
3114 newphi);
3115 bitmap_insert_into_set (NEW_SETS (block),
3116 newphi);
3118 /* If we insert a PHI node for a conversion of another PHI node
3119 in the same basic-block try to preserve range information.
3120 This is important so that followup loop passes receive optimal
3121 number of iteration analysis results. See PR61743. */
3122 if (expr->kind == NARY
3123 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3124 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3125 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3126 && INTEGRAL_TYPE_P (type)
3127 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3128 && (TYPE_PRECISION (type)
3129 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3130 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3132 wide_int min, max;
3133 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3134 && !wi::neg_p (min, SIGNED)
3135 && !wi::neg_p (max, SIGNED))
3136 /* Just handle extension and sign-changes of all-positive ranges. */
3137 set_range_info (temp,
3138 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3139 wide_int_storage::from (min, TYPE_PRECISION (type),
3140 TYPE_SIGN (type)),
3141 wide_int_storage::from (max, TYPE_PRECISION (type),
3142 TYPE_SIGN (type)));
3145 if (dump_file && (dump_flags & TDF_DETAILS))
3147 fprintf (dump_file, "Created phi ");
3148 print_gimple_stmt (dump_file, phi, 0, 0);
3149 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3151 pre_stats.phis++;
3152 return true;
3157 /* Perform insertion of partially redundant values.
3158 For BLOCK, do the following:
3159 1. Propagate the NEW_SETS of the dominator into the current block.
3160 If the block has multiple predecessors,
3161 2a. Iterate over the ANTIC expressions for the block to see if
3162 any of them are partially redundant.
3163 2b. If so, insert them into the necessary predecessors to make
3164 the expression fully redundant.
3165 2c. Insert a new PHI merging the values of the predecessors.
3166 2d. Insert the new PHI, and the new expressions, into the
3167 NEW_SETS set.
3168 3. Recursively call ourselves on the dominator children of BLOCK.
3170 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3171 do_regular_insertion and do_partial_insertion.
3175 static bool
3176 do_regular_insertion (basic_block block, basic_block dom)
3178 bool new_stuff = false;
3179 vec<pre_expr> exprs;
3180 pre_expr expr;
3181 auto_vec<pre_expr> avail;
3182 int i;
3184 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3185 avail.safe_grow (EDGE_COUNT (block->preds));
3187 FOR_EACH_VEC_ELT (exprs, i, expr)
3189 if (expr->kind == NARY
3190 || expr->kind == REFERENCE)
3192 unsigned int val;
3193 bool by_some = false;
3194 bool cant_insert = false;
3195 bool all_same = true;
3196 pre_expr first_s = NULL;
3197 edge pred;
3198 basic_block bprime;
3199 pre_expr eprime = NULL;
3200 edge_iterator ei;
3201 pre_expr edoubleprime = NULL;
3202 bool do_insertion = false;
3204 val = get_expr_value_id (expr);
3205 if (bitmap_set_contains_value (PHI_GEN (block), val))
3206 continue;
3207 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3209 if (dump_file && (dump_flags & TDF_DETAILS))
3211 fprintf (dump_file, "Found fully redundant value: ");
3212 print_pre_expr (dump_file, expr);
3213 fprintf (dump_file, "\n");
3215 continue;
3218 FOR_EACH_EDGE (pred, ei, block->preds)
3220 unsigned int vprime;
3222 /* We should never run insertion for the exit block
3223 and so not come across fake pred edges. */
3224 gcc_assert (!(pred->flags & EDGE_FAKE));
3225 bprime = pred->src;
3226 /* We are looking at ANTIC_OUT of bprime. */
3227 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3228 bprime, block);
3230 /* eprime will generally only be NULL if the
3231 value of the expression, translated
3232 through the PHI for this predecessor, is
3233 undefined. If that is the case, we can't
3234 make the expression fully redundant,
3235 because its value is undefined along a
3236 predecessor path. We can thus break out
3237 early because it doesn't matter what the
3238 rest of the results are. */
3239 if (eprime == NULL)
3241 avail[pred->dest_idx] = NULL;
3242 cant_insert = true;
3243 break;
3246 eprime = fully_constant_expression (eprime);
3247 vprime = get_expr_value_id (eprime);
3248 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3249 vprime);
3250 if (edoubleprime == NULL)
3252 avail[pred->dest_idx] = eprime;
3253 all_same = false;
3255 else
3257 avail[pred->dest_idx] = edoubleprime;
3258 by_some = true;
3259 /* We want to perform insertions to remove a redundancy on
3260 a path in the CFG we want to optimize for speed. */
3261 if (optimize_edge_for_speed_p (pred))
3262 do_insertion = true;
3263 if (first_s == NULL)
3264 first_s = edoubleprime;
3265 else if (!pre_expr_d::equal (first_s, edoubleprime))
3266 all_same = false;
3269 /* If we can insert it, it's not the same value
3270 already existing along every predecessor, and
3271 it's defined by some predecessor, it is
3272 partially redundant. */
3273 if (!cant_insert && !all_same && by_some)
3275 if (!do_insertion)
3277 if (dump_file && (dump_flags & TDF_DETAILS))
3279 fprintf (dump_file, "Skipping partial redundancy for "
3280 "expression ");
3281 print_pre_expr (dump_file, expr);
3282 fprintf (dump_file, " (%04d), no redundancy on to be "
3283 "optimized for speed edge\n", val);
3286 else if (dbg_cnt (treepre_insert))
3288 if (dump_file && (dump_flags & TDF_DETAILS))
3290 fprintf (dump_file, "Found partial redundancy for "
3291 "expression ");
3292 print_pre_expr (dump_file, expr);
3293 fprintf (dump_file, " (%04d)\n",
3294 get_expr_value_id (expr));
3296 if (insert_into_preds_of_block (block,
3297 get_expression_id (expr),
3298 avail))
3299 new_stuff = true;
3302 /* If all edges produce the same value and that value is
3303 an invariant, then the PHI has the same value on all
3304 edges. Note this. */
3305 else if (!cant_insert && all_same)
3307 gcc_assert (edoubleprime->kind == CONSTANT
3308 || edoubleprime->kind == NAME);
3310 tree temp = make_temp_ssa_name (get_expr_type (expr),
3311 NULL, "pretmp");
3312 gassign *assign
3313 = gimple_build_assign (temp,
3314 edoubleprime->kind == CONSTANT ?
3315 PRE_EXPR_CONSTANT (edoubleprime) :
3316 PRE_EXPR_NAME (edoubleprime));
3317 gimple_stmt_iterator gsi = gsi_after_labels (block);
3318 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3320 gimple_set_plf (assign, NECESSARY, false);
3321 VN_INFO_GET (temp)->value_id = val;
3322 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3323 if (VN_INFO (temp)->valnum == NULL_TREE)
3324 VN_INFO (temp)->valnum = temp;
3325 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3326 pre_expr newe = get_or_alloc_expr_for_name (temp);
3327 add_to_value (val, newe);
3328 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3329 bitmap_insert_into_set (NEW_SETS (block), newe);
3334 exprs.release ();
3335 return new_stuff;
3339 /* Perform insertion for partially anticipatable expressions. There
3340 is only one case we will perform insertion for these. This case is
3341 if the expression is partially anticipatable, and fully available.
3342 In this case, we know that putting it earlier will enable us to
3343 remove the later computation. */
3346 static bool
3347 do_partial_partial_insertion (basic_block block, basic_block dom)
3349 bool new_stuff = false;
3350 vec<pre_expr> exprs;
3351 pre_expr expr;
3352 auto_vec<pre_expr> avail;
3353 int i;
3355 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3356 avail.safe_grow (EDGE_COUNT (block->preds));
3358 FOR_EACH_VEC_ELT (exprs, i, expr)
3360 if (expr->kind == NARY
3361 || expr->kind == REFERENCE)
3363 unsigned int val;
3364 bool by_all = true;
3365 bool cant_insert = false;
3366 edge pred;
3367 basic_block bprime;
3368 pre_expr eprime = NULL;
3369 edge_iterator ei;
3371 val = get_expr_value_id (expr);
3372 if (bitmap_set_contains_value (PHI_GEN (block), val))
3373 continue;
3374 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3375 continue;
3377 FOR_EACH_EDGE (pred, ei, block->preds)
3379 unsigned int vprime;
3380 pre_expr edoubleprime;
3382 /* We should never run insertion for the exit block
3383 and so not come across fake pred edges. */
3384 gcc_assert (!(pred->flags & EDGE_FAKE));
3385 bprime = pred->src;
3386 eprime = phi_translate (expr, ANTIC_IN (block),
3387 PA_IN (block),
3388 bprime, block);
3390 /* eprime will generally only be NULL if the
3391 value of the expression, translated
3392 through the PHI for this predecessor, is
3393 undefined. If that is the case, we can't
3394 make the expression fully redundant,
3395 because its value is undefined along a
3396 predecessor path. We can thus break out
3397 early because it doesn't matter what the
3398 rest of the results are. */
3399 if (eprime == NULL)
3401 avail[pred->dest_idx] = NULL;
3402 cant_insert = true;
3403 break;
3406 eprime = fully_constant_expression (eprime);
3407 vprime = get_expr_value_id (eprime);
3408 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3409 avail[pred->dest_idx] = edoubleprime;
3410 if (edoubleprime == NULL)
3412 by_all = false;
3413 break;
3417 /* If we can insert it, it's not the same value
3418 already existing along every predecessor, and
3419 it's defined by some predecessor, it is
3420 partially redundant. */
3421 if (!cant_insert && by_all)
3423 edge succ;
3424 bool do_insertion = false;
3426 /* Insert only if we can remove a later expression on a path
3427 that we want to optimize for speed.
3428 The phi node that we will be inserting in BLOCK is not free,
3429 and inserting it for the sake of !optimize_for_speed successor
3430 may cause regressions on the speed path. */
3431 FOR_EACH_EDGE (succ, ei, block->succs)
3433 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3434 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3436 if (optimize_edge_for_speed_p (succ))
3437 do_insertion = true;
3441 if (!do_insertion)
3443 if (dump_file && (dump_flags & TDF_DETAILS))
3445 fprintf (dump_file, "Skipping partial partial redundancy "
3446 "for expression ");
3447 print_pre_expr (dump_file, expr);
3448 fprintf (dump_file, " (%04d), not (partially) anticipated "
3449 "on any to be optimized for speed edges\n", val);
3452 else if (dbg_cnt (treepre_insert))
3454 pre_stats.pa_insert++;
3455 if (dump_file && (dump_flags & TDF_DETAILS))
3457 fprintf (dump_file, "Found partial partial redundancy "
3458 "for expression ");
3459 print_pre_expr (dump_file, expr);
3460 fprintf (dump_file, " (%04d)\n",
3461 get_expr_value_id (expr));
3463 if (insert_into_preds_of_block (block,
3464 get_expression_id (expr),
3465 avail))
3466 new_stuff = true;
3472 exprs.release ();
3473 return new_stuff;
3476 static bool
3477 insert_aux (basic_block block)
3479 basic_block son;
3480 bool new_stuff = false;
3482 if (block)
3484 basic_block dom;
3485 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3486 if (dom)
3488 unsigned i;
3489 bitmap_iterator bi;
3490 bitmap_set_t newset = NEW_SETS (dom);
3491 if (newset)
3493 /* Note that we need to value_replace both NEW_SETS, and
3494 AVAIL_OUT. For both the case of NEW_SETS, the value may be
3495 represented by some non-simple expression here that we want
3496 to replace it with. */
3497 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3499 pre_expr expr = expression_for_id (i);
3500 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3501 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3504 if (!single_pred_p (block))
3506 new_stuff |= do_regular_insertion (block, dom);
3507 if (do_partial_partial)
3508 new_stuff |= do_partial_partial_insertion (block, dom);
3512 for (son = first_dom_son (CDI_DOMINATORS, block);
3513 son;
3514 son = next_dom_son (CDI_DOMINATORS, son))
3516 new_stuff |= insert_aux (son);
3519 return new_stuff;
3522 /* Perform insertion of partially redundant values. */
3524 static void
3525 insert (void)
3527 bool new_stuff = true;
3528 basic_block bb;
3529 int num_iterations = 0;
3531 FOR_ALL_BB_FN (bb, cfun)
3532 NEW_SETS (bb) = bitmap_set_new ();
3534 while (new_stuff)
3536 num_iterations++;
3537 if (dump_file && dump_flags & TDF_DETAILS)
3538 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3539 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3541 /* Clear the NEW sets before the next iteration. We have already
3542 fully propagated its contents. */
3543 if (new_stuff)
3544 FOR_ALL_BB_FN (bb, cfun)
3545 bitmap_set_free (NEW_SETS (bb));
3547 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3551 /* Compute the AVAIL set for all basic blocks.
3553 This function performs value numbering of the statements in each basic
3554 block. The AVAIL sets are built from information we glean while doing
3555 this value numbering, since the AVAIL sets contain only one entry per
3556 value.
3558 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3559 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3561 static void
3562 compute_avail (void)
3565 basic_block block, son;
3566 basic_block *worklist;
3567 size_t sp = 0;
3568 unsigned i;
3570 /* We pretend that default definitions are defined in the entry block.
3571 This includes function arguments and the static chain decl. */
3572 for (i = 1; i < num_ssa_names; ++i)
3574 tree name = ssa_name (i);
3575 pre_expr e;
3576 if (!name
3577 || !SSA_NAME_IS_DEFAULT_DEF (name)
3578 || has_zero_uses (name)
3579 || virtual_operand_p (name))
3580 continue;
3582 e = get_or_alloc_expr_for_name (name);
3583 add_to_value (get_expr_value_id (e), e);
3584 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3585 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3592 "tmp_gen", ENTRY_BLOCK);
3593 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3594 "avail_out", ENTRY_BLOCK);
3597 /* Allocate the worklist. */
3598 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3600 /* Seed the algorithm by putting the dominator children of the entry
3601 block on the worklist. */
3602 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3603 son;
3604 son = next_dom_son (CDI_DOMINATORS, son))
3605 worklist[sp++] = son;
3607 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3608 = ssa_default_def (cfun, gimple_vop (cfun));
3610 /* Loop until the worklist is empty. */
3611 while (sp)
3613 gimple *stmt;
3614 basic_block dom;
3616 /* Pick a block from the worklist. */
3617 block = worklist[--sp];
3619 /* Initially, the set of available values in BLOCK is that of
3620 its immediate dominator. */
3621 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3622 if (dom)
3624 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3625 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3628 /* Generate values for PHI nodes. */
3629 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3630 gsi_next (&gsi))
3632 tree result = gimple_phi_result (gsi.phi ());
3634 /* We have no need for virtual phis, as they don't represent
3635 actual computations. */
3636 if (virtual_operand_p (result))
3638 BB_LIVE_VOP_ON_EXIT (block) = result;
3639 continue;
3642 pre_expr e = get_or_alloc_expr_for_name (result);
3643 add_to_value (get_expr_value_id (e), e);
3644 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3645 bitmap_insert_into_set (PHI_GEN (block), e);
3648 BB_MAY_NOTRETURN (block) = 0;
3650 /* Now compute value numbers and populate value sets with all
3651 the expressions computed in BLOCK. */
3652 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3653 gsi_next (&gsi))
3655 ssa_op_iter iter;
3656 tree op;
3658 stmt = gsi_stmt (gsi);
3660 /* Cache whether the basic-block has any non-visible side-effect
3661 or control flow.
3662 If this isn't a call or it is the last stmt in the
3663 basic-block then the CFG represents things correctly. */
3664 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3666 /* Non-looping const functions always return normally.
3667 Otherwise the call might not return or have side-effects
3668 that forbids hoisting possibly trapping expressions
3669 before it. */
3670 int flags = gimple_call_flags (stmt);
3671 if (!(flags & ECF_CONST)
3672 || (flags & ECF_LOOPING_CONST_OR_PURE))
3673 BB_MAY_NOTRETURN (block) = 1;
3676 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3678 pre_expr e = get_or_alloc_expr_for_name (op);
3680 add_to_value (get_expr_value_id (e), e);
3681 bitmap_insert_into_set (TMP_GEN (block), e);
3682 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3685 if (gimple_vdef (stmt))
3686 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3688 if (gimple_has_side_effects (stmt)
3689 || stmt_could_throw_p (stmt)
3690 || is_gimple_debug (stmt))
3691 continue;
3693 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3695 if (ssa_undefined_value_p (op))
3696 continue;
3697 pre_expr e = get_or_alloc_expr_for_name (op);
3698 bitmap_value_insert_into_set (EXP_GEN (block), e);
3701 switch (gimple_code (stmt))
3703 case GIMPLE_RETURN:
3704 continue;
3706 case GIMPLE_CALL:
3708 vn_reference_t ref;
3709 vn_reference_s ref1;
3710 pre_expr result = NULL;
3712 /* We can value number only calls to real functions. */
3713 if (gimple_call_internal_p (stmt))
3714 continue;
3716 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3717 if (!ref)
3718 continue;
3720 /* If the value of the call is not invalidated in
3721 this block until it is computed, add the expression
3722 to EXP_GEN. */
3723 if (!gimple_vuse (stmt)
3724 || gimple_code
3725 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3726 || gimple_bb (SSA_NAME_DEF_STMT
3727 (gimple_vuse (stmt))) != block)
3729 result = pre_expr_pool.allocate ();
3730 result->kind = REFERENCE;
3731 result->id = 0;
3732 PRE_EXPR_REFERENCE (result) = ref;
3734 get_or_alloc_expression_id (result);
3735 add_to_value (get_expr_value_id (result), result);
3736 bitmap_value_insert_into_set (EXP_GEN (block), result);
3738 continue;
3741 case GIMPLE_ASSIGN:
3743 pre_expr result = NULL;
3744 switch (vn_get_stmt_kind (stmt))
3746 case VN_NARY:
3748 enum tree_code code = gimple_assign_rhs_code (stmt);
3749 vn_nary_op_t nary;
3751 /* COND_EXPR and VEC_COND_EXPR are awkward in
3752 that they contain an embedded complex expression.
3753 Don't even try to shove those through PRE. */
3754 if (code == COND_EXPR
3755 || code == VEC_COND_EXPR)
3756 continue;
3758 vn_nary_op_lookup_stmt (stmt, &nary);
3759 if (!nary)
3760 continue;
3762 /* If the NARY traps and there was a preceding
3763 point in the block that might not return avoid
3764 adding the nary to EXP_GEN. */
3765 if (BB_MAY_NOTRETURN (block)
3766 && vn_nary_may_trap (nary))
3767 continue;
3769 result = pre_expr_pool.allocate ();
3770 result->kind = NARY;
3771 result->id = 0;
3772 PRE_EXPR_NARY (result) = nary;
3773 break;
3776 case VN_REFERENCE:
3778 vn_reference_t ref;
3779 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3780 gimple_vuse (stmt),
3781 VN_WALK, &ref, true);
3782 if (!ref)
3783 continue;
3785 /* If the value of the reference is not invalidated in
3786 this block until it is computed, add the expression
3787 to EXP_GEN. */
3788 if (gimple_vuse (stmt))
3790 gimple *def_stmt;
3791 bool ok = true;
3792 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3793 while (!gimple_nop_p (def_stmt)
3794 && gimple_code (def_stmt) != GIMPLE_PHI
3795 && gimple_bb (def_stmt) == block)
3797 if (stmt_may_clobber_ref_p
3798 (def_stmt, gimple_assign_rhs1 (stmt)))
3800 ok = false;
3801 break;
3803 def_stmt
3804 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3806 if (!ok)
3807 continue;
3810 result = pre_expr_pool.allocate ();
3811 result->kind = REFERENCE;
3812 result->id = 0;
3813 PRE_EXPR_REFERENCE (result) = ref;
3814 break;
3817 default:
3818 continue;
3821 get_or_alloc_expression_id (result);
3822 add_to_value (get_expr_value_id (result), result);
3823 bitmap_value_insert_into_set (EXP_GEN (block), result);
3824 continue;
3826 default:
3827 break;
3831 if (dump_file && (dump_flags & TDF_DETAILS))
3833 print_bitmap_set (dump_file, EXP_GEN (block),
3834 "exp_gen", block->index);
3835 print_bitmap_set (dump_file, PHI_GEN (block),
3836 "phi_gen", block->index);
3837 print_bitmap_set (dump_file, TMP_GEN (block),
3838 "tmp_gen", block->index);
3839 print_bitmap_set (dump_file, AVAIL_OUT (block),
3840 "avail_out", block->index);
3843 /* Put the dominator children of BLOCK on the worklist of blocks
3844 to compute available sets for. */
3845 for (son = first_dom_son (CDI_DOMINATORS, block);
3846 son;
3847 son = next_dom_son (CDI_DOMINATORS, son))
3848 worklist[sp++] = son;
3851 free (worklist);
3855 /* Local state for the eliminate domwalk. */
3856 static vec<gimple *> el_to_remove;
3857 static vec<gimple *> el_to_fixup;
3858 static unsigned int el_todo;
3859 static vec<tree> el_avail;
3860 static vec<tree> el_avail_stack;
3862 /* Return a leader for OP that is available at the current point of the
3863 eliminate domwalk. */
3865 static tree
3866 eliminate_avail (tree op)
3868 tree valnum = VN_INFO (op)->valnum;
3869 if (TREE_CODE (valnum) == SSA_NAME)
3871 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3872 return valnum;
3873 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3874 return el_avail[SSA_NAME_VERSION (valnum)];
3876 else if (is_gimple_min_invariant (valnum))
3877 return valnum;
3878 return NULL_TREE;
3881 /* At the current point of the eliminate domwalk make OP available. */
3883 static void
3884 eliminate_push_avail (tree op)
3886 tree valnum = VN_INFO (op)->valnum;
3887 if (TREE_CODE (valnum) == SSA_NAME)
3889 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
3890 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
3891 tree pushop = op;
3892 if (el_avail[SSA_NAME_VERSION (valnum)])
3893 pushop = el_avail[SSA_NAME_VERSION (valnum)];
3894 el_avail_stack.safe_push (pushop);
3895 el_avail[SSA_NAME_VERSION (valnum)] = op;
3899 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3900 the leader for the expression if insertion was successful. */
3902 static tree
3903 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
3905 gimple *stmt = gimple_seq_first_stmt (VN_INFO (val)->expr);
3906 if (!is_gimple_assign (stmt)
3907 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
3908 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR))
3909 return NULL_TREE;
3911 tree op = gimple_assign_rhs1 (stmt);
3912 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3913 op = TREE_OPERAND (op, 0);
3914 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
3915 if (!leader)
3916 return NULL_TREE;
3918 gimple_seq stmts = NULL;
3919 tree res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
3920 TREE_TYPE (val), leader);
3921 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3922 VN_INFO_GET (res)->valnum = val;
3924 if (TREE_CODE (leader) == SSA_NAME)
3925 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
3927 pre_stats.insertions++;
3928 if (dump_file && (dump_flags & TDF_DETAILS))
3930 fprintf (dump_file, "Inserted ");
3931 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0, 0);
3934 return res;
3937 class eliminate_dom_walker : public dom_walker
3939 public:
3940 eliminate_dom_walker (cdi_direction direction, bool do_pre_)
3941 : dom_walker (direction), do_pre (do_pre_) {}
3943 virtual edge before_dom_children (basic_block);
3944 virtual void after_dom_children (basic_block);
3946 bool do_pre;
3949 /* Perform elimination for the basic-block B during the domwalk. */
3951 edge
3952 eliminate_dom_walker::before_dom_children (basic_block b)
3954 /* Mark new bb. */
3955 el_avail_stack.safe_push (NULL_TREE);
3957 /* ??? If we do nothing for unreachable blocks then this will confuse
3958 tailmerging. Eventually we can reduce its reliance on SCCVN now
3959 that we fully copy/constant-propagate (most) things. */
3961 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
3963 gphi *phi = gsi.phi ();
3964 tree res = PHI_RESULT (phi);
3966 if (virtual_operand_p (res))
3968 gsi_next (&gsi);
3969 continue;
3972 tree sprime = eliminate_avail (res);
3973 if (sprime
3974 && sprime != res)
3976 if (dump_file && (dump_flags & TDF_DETAILS))
3978 fprintf (dump_file, "Replaced redundant PHI node defining ");
3979 print_generic_expr (dump_file, res, 0);
3980 fprintf (dump_file, " with ");
3981 print_generic_expr (dump_file, sprime, 0);
3982 fprintf (dump_file, "\n");
3985 /* If we inserted this PHI node ourself, it's not an elimination. */
3986 if (inserted_exprs
3987 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
3988 pre_stats.phis--;
3989 else
3990 pre_stats.eliminations++;
3992 /* If we will propagate into all uses don't bother to do
3993 anything. */
3994 if (may_propagate_copy (res, sprime))
3996 /* Mark the PHI for removal. */
3997 el_to_remove.safe_push (phi);
3998 gsi_next (&gsi);
3999 continue;
4002 remove_phi_node (&gsi, false);
4004 if (inserted_exprs
4005 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4006 && TREE_CODE (sprime) == SSA_NAME)
4007 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4009 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4010 sprime = fold_convert (TREE_TYPE (res), sprime);
4011 gimple *stmt = gimple_build_assign (res, sprime);
4012 /* ??? It cannot yet be necessary (DOM walk). */
4013 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4015 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
4016 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4017 continue;
4020 eliminate_push_avail (res);
4021 gsi_next (&gsi);
4024 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4025 !gsi_end_p (gsi);
4026 gsi_next (&gsi))
4028 tree sprime = NULL_TREE;
4029 gimple *stmt = gsi_stmt (gsi);
4030 tree lhs = gimple_get_lhs (stmt);
4031 if (lhs && TREE_CODE (lhs) == SSA_NAME
4032 && !gimple_has_volatile_ops (stmt)
4033 /* See PR43491. Do not replace a global register variable when
4034 it is a the RHS of an assignment. Do replace local register
4035 variables since gcc does not guarantee a local variable will
4036 be allocated in register.
4037 ??? The fix isn't effective here. This should instead
4038 be ensured by not value-numbering them the same but treating
4039 them like volatiles? */
4040 && !(gimple_assign_single_p (stmt)
4041 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4042 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4043 && is_global_var (gimple_assign_rhs1 (stmt)))))
4045 sprime = eliminate_avail (lhs);
4046 if (!sprime)
4048 /* If there is no existing usable leader but SCCVN thinks
4049 it has an expression it wants to use as replacement,
4050 insert that. */
4051 tree val = VN_INFO (lhs)->valnum;
4052 if (val != VN_TOP
4053 && TREE_CODE (val) == SSA_NAME
4054 && VN_INFO (val)->needs_insertion
4055 && VN_INFO (val)->expr != NULL
4056 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4057 eliminate_push_avail (sprime);
4060 /* If this now constitutes a copy duplicate points-to
4061 and range info appropriately. This is especially
4062 important for inserted code. See tree-ssa-copy.c
4063 for similar code. */
4064 if (sprime
4065 && TREE_CODE (sprime) == SSA_NAME)
4067 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4068 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4069 && VN_INFO_PTR_INFO (lhs)
4070 && ! VN_INFO_PTR_INFO (sprime))
4072 duplicate_ssa_name_ptr_info (sprime,
4073 VN_INFO_PTR_INFO (lhs));
4074 if (b != sprime_b)
4075 mark_ptr_info_alignment_unknown
4076 (SSA_NAME_PTR_INFO (sprime));
4078 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4079 && VN_INFO_RANGE_INFO (lhs)
4080 && ! VN_INFO_RANGE_INFO (sprime)
4081 && b == sprime_b)
4082 duplicate_ssa_name_range_info (sprime,
4083 VN_INFO_RANGE_TYPE (lhs),
4084 VN_INFO_RANGE_INFO (lhs));
4087 /* Inhibit the use of an inserted PHI on a loop header when
4088 the address of the memory reference is a simple induction
4089 variable. In other cases the vectorizer won't do anything
4090 anyway (either it's loop invariant or a complicated
4091 expression). */
4092 if (sprime
4093 && TREE_CODE (sprime) == SSA_NAME
4094 && do_pre
4095 && flag_tree_loop_vectorize
4096 && loop_outer (b->loop_father)
4097 && has_zero_uses (sprime)
4098 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4099 && gimple_assign_load_p (stmt))
4101 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4102 basic_block def_bb = gimple_bb (def_stmt);
4103 if (gimple_code (def_stmt) == GIMPLE_PHI
4104 && def_bb->loop_father->header == def_bb)
4106 loop_p loop = def_bb->loop_father;
4107 ssa_op_iter iter;
4108 tree op;
4109 bool found = false;
4110 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4112 affine_iv iv;
4113 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4114 if (def_bb
4115 && flow_bb_inside_loop_p (loop, def_bb)
4116 && simple_iv (loop, loop, op, &iv, true))
4118 found = true;
4119 break;
4122 if (found)
4124 if (dump_file && (dump_flags & TDF_DETAILS))
4126 fprintf (dump_file, "Not replacing ");
4127 print_gimple_expr (dump_file, stmt, 0, 0);
4128 fprintf (dump_file, " with ");
4129 print_generic_expr (dump_file, sprime, 0);
4130 fprintf (dump_file, " which would add a loop"
4131 " carried dependence to loop %d\n",
4132 loop->num);
4134 /* Don't keep sprime available. */
4135 sprime = NULL_TREE;
4140 if (sprime)
4142 /* If we can propagate the value computed for LHS into
4143 all uses don't bother doing anything with this stmt. */
4144 if (may_propagate_copy (lhs, sprime))
4146 /* Mark it for removal. */
4147 el_to_remove.safe_push (stmt);
4149 /* ??? Don't count copy/constant propagations. */
4150 if (gimple_assign_single_p (stmt)
4151 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4152 || gimple_assign_rhs1 (stmt) == sprime))
4153 continue;
4155 if (dump_file && (dump_flags & TDF_DETAILS))
4157 fprintf (dump_file, "Replaced ");
4158 print_gimple_expr (dump_file, stmt, 0, 0);
4159 fprintf (dump_file, " with ");
4160 print_generic_expr (dump_file, sprime, 0);
4161 fprintf (dump_file, " in all uses of ");
4162 print_gimple_stmt (dump_file, stmt, 0, 0);
4165 pre_stats.eliminations++;
4166 continue;
4169 /* If this is an assignment from our leader (which
4170 happens in the case the value-number is a constant)
4171 then there is nothing to do. */
4172 if (gimple_assign_single_p (stmt)
4173 && sprime == gimple_assign_rhs1 (stmt))
4174 continue;
4176 /* Else replace its RHS. */
4177 bool can_make_abnormal_goto
4178 = is_gimple_call (stmt)
4179 && stmt_can_make_abnormal_goto (stmt);
4181 if (dump_file && (dump_flags & TDF_DETAILS))
4183 fprintf (dump_file, "Replaced ");
4184 print_gimple_expr (dump_file, stmt, 0, 0);
4185 fprintf (dump_file, " with ");
4186 print_generic_expr (dump_file, sprime, 0);
4187 fprintf (dump_file, " in ");
4188 print_gimple_stmt (dump_file, stmt, 0, 0);
4191 if (TREE_CODE (sprime) == SSA_NAME)
4192 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4193 NECESSARY, true);
4195 pre_stats.eliminations++;
4196 gimple *orig_stmt = stmt;
4197 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4198 TREE_TYPE (sprime)))
4199 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4200 tree vdef = gimple_vdef (stmt);
4201 tree vuse = gimple_vuse (stmt);
4202 propagate_tree_value_into_stmt (&gsi, sprime);
4203 stmt = gsi_stmt (gsi);
4204 update_stmt (stmt);
4205 if (vdef != gimple_vdef (stmt))
4206 VN_INFO (vdef)->valnum = vuse;
4208 /* If we removed EH side-effects from the statement, clean
4209 its EH information. */
4210 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4212 bitmap_set_bit (need_eh_cleanup,
4213 gimple_bb (stmt)->index);
4214 if (dump_file && (dump_flags & TDF_DETAILS))
4215 fprintf (dump_file, " Removed EH side-effects.\n");
4218 /* Likewise for AB side-effects. */
4219 if (can_make_abnormal_goto
4220 && !stmt_can_make_abnormal_goto (stmt))
4222 bitmap_set_bit (need_ab_cleanup,
4223 gimple_bb (stmt)->index);
4224 if (dump_file && (dump_flags & TDF_DETAILS))
4225 fprintf (dump_file, " Removed AB side-effects.\n");
4228 continue;
4232 /* If the statement is a scalar store, see if the expression
4233 has the same value number as its rhs. If so, the store is
4234 dead. */
4235 if (gimple_assign_single_p (stmt)
4236 && !gimple_has_volatile_ops (stmt)
4237 && !is_gimple_reg (gimple_assign_lhs (stmt))
4238 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4239 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4241 tree val;
4242 tree rhs = gimple_assign_rhs1 (stmt);
4243 vn_reference_t vnresult;
4244 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
4245 &vnresult, false);
4246 if (TREE_CODE (rhs) == SSA_NAME)
4247 rhs = VN_INFO (rhs)->valnum;
4248 if (val
4249 && operand_equal_p (val, rhs, 0))
4251 /* We can only remove the later store if the former aliases
4252 at least all accesses the later one does or if the store
4253 was to readonly memory storing the same value. */
4254 alias_set_type set = get_alias_set (lhs);
4255 if (! vnresult
4256 || vnresult->set == set
4257 || alias_set_subset_of (set, vnresult->set))
4259 if (dump_file && (dump_flags & TDF_DETAILS))
4261 fprintf (dump_file, "Deleted redundant store ");
4262 print_gimple_stmt (dump_file, stmt, 0, 0);
4265 /* Queue stmt for removal. */
4266 el_to_remove.safe_push (stmt);
4267 continue;
4272 /* If this is a control statement value numbering left edges
4273 unexecuted on force the condition in a way consistent with
4274 that. */
4275 if (gcond *cond = dyn_cast <gcond *> (stmt))
4277 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
4278 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
4280 if (dump_file && (dump_flags & TDF_DETAILS))
4282 fprintf (dump_file, "Removing unexecutable edge from ");
4283 print_gimple_stmt (dump_file, stmt, 0, 0);
4285 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
4286 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
4287 gimple_cond_make_true (cond);
4288 else
4289 gimple_cond_make_false (cond);
4290 update_stmt (cond);
4291 el_todo |= TODO_cleanup_cfg;
4292 continue;
4296 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4297 bool was_noreturn = (is_gimple_call (stmt)
4298 && gimple_call_noreturn_p (stmt));
4299 tree vdef = gimple_vdef (stmt);
4300 tree vuse = gimple_vuse (stmt);
4302 /* If we didn't replace the whole stmt (or propagate the result
4303 into all uses), replace all uses on this stmt with their
4304 leaders. */
4305 use_operand_p use_p;
4306 ssa_op_iter iter;
4307 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4309 tree use = USE_FROM_PTR (use_p);
4310 /* ??? The call code above leaves stmt operands un-updated. */
4311 if (TREE_CODE (use) != SSA_NAME)
4312 continue;
4313 tree sprime = eliminate_avail (use);
4314 if (sprime && sprime != use
4315 && may_propagate_copy (use, sprime)
4316 /* We substitute into debug stmts to avoid excessive
4317 debug temporaries created by removed stmts, but we need
4318 to avoid doing so for inserted sprimes as we never want
4319 to create debug temporaries for them. */
4320 && (!inserted_exprs
4321 || TREE_CODE (sprime) != SSA_NAME
4322 || !is_gimple_debug (stmt)
4323 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4325 propagate_value (use_p, sprime);
4326 gimple_set_modified (stmt, true);
4327 if (TREE_CODE (sprime) == SSA_NAME
4328 && !is_gimple_debug (stmt))
4329 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4330 NECESSARY, true);
4334 /* Visit indirect calls and turn them into direct calls if
4335 possible using the devirtualization machinery. */
4336 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4338 tree fn = gimple_call_fn (call_stmt);
4339 if (fn
4340 && flag_devirtualize
4341 && virtual_method_call_p (fn))
4343 tree otr_type = obj_type_ref_class (fn);
4344 tree instance;
4345 ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
4346 bool final;
4348 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
4350 vec <cgraph_node *>targets
4351 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
4352 tree_to_uhwi
4353 (OBJ_TYPE_REF_TOKEN (fn)),
4354 context,
4355 &final);
4356 if (dump_file)
4357 dump_possible_polymorphic_call_targets (dump_file,
4358 obj_type_ref_class (fn),
4359 tree_to_uhwi
4360 (OBJ_TYPE_REF_TOKEN (fn)),
4361 context);
4362 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4364 tree fn;
4365 if (targets.length () == 1)
4366 fn = targets[0]->decl;
4367 else
4368 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4369 if (dump_enabled_p ())
4371 location_t loc = gimple_location_safe (stmt);
4372 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4373 "converting indirect call to "
4374 "function %s\n",
4375 lang_hooks.decl_printable_name (fn, 2));
4377 gimple_call_set_fndecl (call_stmt, fn);
4378 /* If changing the call to __builtin_unreachable
4379 or similar noreturn function, adjust gimple_call_fntype
4380 too. */
4381 if (gimple_call_noreturn_p (call_stmt)
4382 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
4383 && TYPE_ARG_TYPES (TREE_TYPE (fn))
4384 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
4385 == void_type_node))
4386 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
4387 maybe_remove_unused_call_args (cfun, call_stmt);
4388 gimple_set_modified (stmt, true);
4393 if (gimple_modified_p (stmt))
4395 /* If a formerly non-invariant ADDR_EXPR is turned into an
4396 invariant one it was on a separate stmt. */
4397 if (gimple_assign_single_p (stmt)
4398 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4399 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4400 gimple *old_stmt = stmt;
4401 if (is_gimple_call (stmt))
4403 /* ??? Only fold calls inplace for now, this may create new
4404 SSA names which in turn will confuse free_scc_vn SSA name
4405 release code. */
4406 fold_stmt_inplace (&gsi);
4407 /* When changing a call into a noreturn call, cfg cleanup
4408 is needed to fix up the noreturn call. */
4409 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4410 el_to_fixup.safe_push (stmt);
4412 else
4414 fold_stmt (&gsi);
4415 stmt = gsi_stmt (gsi);
4416 if ((gimple_code (stmt) == GIMPLE_COND
4417 && (gimple_cond_true_p (as_a <gcond *> (stmt))
4418 || gimple_cond_false_p (as_a <gcond *> (stmt))))
4419 || (gimple_code (stmt) == GIMPLE_SWITCH
4420 && TREE_CODE (gimple_switch_index (
4421 as_a <gswitch *> (stmt)))
4422 == INTEGER_CST))
4423 el_todo |= TODO_cleanup_cfg;
4425 /* If we removed EH side-effects from the statement, clean
4426 its EH information. */
4427 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4429 bitmap_set_bit (need_eh_cleanup,
4430 gimple_bb (stmt)->index);
4431 if (dump_file && (dump_flags & TDF_DETAILS))
4432 fprintf (dump_file, " Removed EH side-effects.\n");
4434 /* Likewise for AB side-effects. */
4435 if (can_make_abnormal_goto
4436 && !stmt_can_make_abnormal_goto (stmt))
4438 bitmap_set_bit (need_ab_cleanup,
4439 gimple_bb (stmt)->index);
4440 if (dump_file && (dump_flags & TDF_DETAILS))
4441 fprintf (dump_file, " Removed AB side-effects.\n");
4443 update_stmt (stmt);
4444 if (vdef != gimple_vdef (stmt))
4445 VN_INFO (vdef)->valnum = vuse;
4448 /* Make new values available - for fully redundant LHS we
4449 continue with the next stmt above and skip this. */
4450 def_operand_p defp;
4451 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4452 eliminate_push_avail (DEF_FROM_PTR (defp));
4455 /* Replace destination PHI arguments. */
4456 edge_iterator ei;
4457 edge e;
4458 FOR_EACH_EDGE (e, ei, b->succs)
4460 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4461 !gsi_end_p (gsi);
4462 gsi_next (&gsi))
4464 gphi *phi = gsi.phi ();
4465 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4466 tree arg = USE_FROM_PTR (use_p);
4467 if (TREE_CODE (arg) != SSA_NAME
4468 || virtual_operand_p (arg))
4469 continue;
4470 tree sprime = eliminate_avail (arg);
4471 if (sprime && may_propagate_copy (arg, sprime))
4473 propagate_value (use_p, sprime);
4474 if (TREE_CODE (sprime) == SSA_NAME)
4475 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4479 return NULL;
4482 /* Make no longer available leaders no longer available. */
4484 void
4485 eliminate_dom_walker::after_dom_children (basic_block)
4487 tree entry;
4488 while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4490 tree valnum = VN_INFO (entry)->valnum;
4491 tree old = el_avail[SSA_NAME_VERSION (valnum)];
4492 if (old == entry)
4493 el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4494 else
4495 el_avail[SSA_NAME_VERSION (valnum)] = entry;
4499 /* Eliminate fully redundant computations. */
4501 static unsigned int
4502 eliminate (bool do_pre)
4504 gimple_stmt_iterator gsi;
4505 gimple *stmt;
4507 need_eh_cleanup = BITMAP_ALLOC (NULL);
4508 need_ab_cleanup = BITMAP_ALLOC (NULL);
4510 el_to_remove.create (0);
4511 el_to_fixup.create (0);
4512 el_todo = 0;
4513 el_avail.create (num_ssa_names);
4514 el_avail_stack.create (0);
4516 eliminate_dom_walker (CDI_DOMINATORS,
4517 do_pre).walk (cfun->cfg->x_entry_block_ptr);
4519 el_avail.release ();
4520 el_avail_stack.release ();
4522 /* We cannot remove stmts during BB walk, especially not release SSA
4523 names there as this confuses the VN machinery. The stmts ending
4524 up in el_to_remove are either stores or simple copies.
4525 Remove stmts in reverse order to make debug stmt creation possible. */
4526 while (!el_to_remove.is_empty ())
4528 stmt = el_to_remove.pop ();
4530 if (dump_file && (dump_flags & TDF_DETAILS))
4532 fprintf (dump_file, "Removing dead stmt ");
4533 print_gimple_stmt (dump_file, stmt, 0, 0);
4536 tree lhs;
4537 if (gimple_code (stmt) == GIMPLE_PHI)
4538 lhs = gimple_phi_result (stmt);
4539 else
4540 lhs = gimple_get_lhs (stmt);
4542 if (inserted_exprs
4543 && TREE_CODE (lhs) == SSA_NAME)
4544 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4546 gsi = gsi_for_stmt (stmt);
4547 if (gimple_code (stmt) == GIMPLE_PHI)
4548 remove_phi_node (&gsi, true);
4549 else
4551 basic_block bb = gimple_bb (stmt);
4552 unlink_stmt_vdef (stmt);
4553 if (gsi_remove (&gsi, true))
4554 bitmap_set_bit (need_eh_cleanup, bb->index);
4555 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
4556 bitmap_set_bit (need_ab_cleanup, bb->index);
4557 release_defs (stmt);
4560 /* Removing a stmt may expose a forwarder block. */
4561 el_todo |= TODO_cleanup_cfg;
4563 el_to_remove.release ();
4565 /* Fixup stmts that became noreturn calls. This may require splitting
4566 blocks and thus isn't possible during the dominator walk. Do this
4567 in reverse order so we don't inadvertedly remove a stmt we want to
4568 fixup by visiting a dominating now noreturn call first. */
4569 while (!el_to_fixup.is_empty ())
4571 stmt = el_to_fixup.pop ();
4573 if (dump_file && (dump_flags & TDF_DETAILS))
4575 fprintf (dump_file, "Fixing up noreturn call ");
4576 print_gimple_stmt (dump_file, stmt, 0, 0);
4579 if (fixup_noreturn_call (stmt))
4580 el_todo |= TODO_cleanup_cfg;
4582 el_to_fixup.release ();
4584 return el_todo;
4587 /* Perform CFG cleanups made necessary by elimination. */
4589 static unsigned
4590 fini_eliminate (void)
4592 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4593 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4595 if (do_eh_cleanup)
4596 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4598 if (do_ab_cleanup)
4599 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4601 BITMAP_FREE (need_eh_cleanup);
4602 BITMAP_FREE (need_ab_cleanup);
4604 if (do_eh_cleanup || do_ab_cleanup)
4605 return TODO_cleanup_cfg;
4606 return 0;
4609 /* Borrow a bit of tree-ssa-dce.c for the moment.
4610 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4611 this may be a bit faster, and we may want critical edges kept split. */
4613 /* If OP's defining statement has not already been determined to be necessary,
4614 mark that statement necessary. Return the stmt, if it is newly
4615 necessary. */
4617 static inline gimple *
4618 mark_operand_necessary (tree op)
4620 gimple *stmt;
4622 gcc_assert (op);
4624 if (TREE_CODE (op) != SSA_NAME)
4625 return NULL;
4627 stmt = SSA_NAME_DEF_STMT (op);
4628 gcc_assert (stmt);
4630 if (gimple_plf (stmt, NECESSARY)
4631 || gimple_nop_p (stmt))
4632 return NULL;
4634 gimple_set_plf (stmt, NECESSARY, true);
4635 return stmt;
4638 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4639 to insert PHI nodes sometimes, and because value numbering of casts isn't
4640 perfect, we sometimes end up inserting dead code. This simple DCE-like
4641 pass removes any insertions we made that weren't actually used. */
4643 static void
4644 remove_dead_inserted_code (void)
4646 bitmap worklist;
4647 unsigned i;
4648 bitmap_iterator bi;
4649 gimple *t;
4651 worklist = BITMAP_ALLOC (NULL);
4652 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4654 t = SSA_NAME_DEF_STMT (ssa_name (i));
4655 if (gimple_plf (t, NECESSARY))
4656 bitmap_set_bit (worklist, i);
4658 while (!bitmap_empty_p (worklist))
4660 i = bitmap_first_set_bit (worklist);
4661 bitmap_clear_bit (worklist, i);
4662 t = SSA_NAME_DEF_STMT (ssa_name (i));
4664 /* PHI nodes are somewhat special in that each PHI alternative has
4665 data and control dependencies. All the statements feeding the
4666 PHI node's arguments are always necessary. */
4667 if (gimple_code (t) == GIMPLE_PHI)
4669 unsigned k;
4671 for (k = 0; k < gimple_phi_num_args (t); k++)
4673 tree arg = PHI_ARG_DEF (t, k);
4674 if (TREE_CODE (arg) == SSA_NAME)
4676 gimple *n = mark_operand_necessary (arg);
4677 if (n)
4678 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4682 else
4684 /* Propagate through the operands. Examine all the USE, VUSE and
4685 VDEF operands in this statement. Mark all the statements
4686 which feed this statement's uses as necessary. */
4687 ssa_op_iter iter;
4688 tree use;
4690 /* The operands of VDEF expressions are also needed as they
4691 represent potential definitions that may reach this
4692 statement (VDEF operands allow us to follow def-def
4693 links). */
4695 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4697 gimple *n = mark_operand_necessary (use);
4698 if (n)
4699 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4704 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4706 t = SSA_NAME_DEF_STMT (ssa_name (i));
4707 if (!gimple_plf (t, NECESSARY))
4709 gimple_stmt_iterator gsi;
4711 if (dump_file && (dump_flags & TDF_DETAILS))
4713 fprintf (dump_file, "Removing unnecessary insertion:");
4714 print_gimple_stmt (dump_file, t, 0, 0);
4717 gsi = gsi_for_stmt (t);
4718 if (gimple_code (t) == GIMPLE_PHI)
4719 remove_phi_node (&gsi, true);
4720 else
4722 gsi_remove (&gsi, true);
4723 release_defs (t);
4727 BITMAP_FREE (worklist);
4731 /* Initialize data structures used by PRE. */
4733 static void
4734 init_pre (void)
4736 basic_block bb;
4738 next_expression_id = 1;
4739 expressions.create (0);
4740 expressions.safe_push (NULL);
4741 value_expressions.create (get_max_value_id () + 1);
4742 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4743 name_to_id.create (0);
4745 inserted_exprs = BITMAP_ALLOC (NULL);
4747 connect_infinite_loops_to_exit ();
4748 memset (&pre_stats, 0, sizeof (pre_stats));
4750 /* For ANTIC computation we need a postorder that also guarantees that
4751 a block with a single successor is visited after its successor.
4752 RPO on the inverted CFG has this property. */
4753 postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
4754 postorder_num = inverted_post_order_compute (postorder);
4756 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4758 calculate_dominance_info (CDI_DOMINATORS);
4760 bitmap_obstack_initialize (&grand_bitmap_obstack);
4761 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4762 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4763 FOR_ALL_BB_FN (bb, cfun)
4765 EXP_GEN (bb) = bitmap_set_new ();
4766 PHI_GEN (bb) = bitmap_set_new ();
4767 TMP_GEN (bb) = bitmap_set_new ();
4768 AVAIL_OUT (bb) = bitmap_set_new ();
4773 /* Deallocate data structures used by PRE. */
4775 static void
4776 fini_pre ()
4778 free (postorder);
4779 value_expressions.release ();
4780 BITMAP_FREE (inserted_exprs);
4781 bitmap_obstack_release (&grand_bitmap_obstack);
4782 bitmap_set_pool.release ();
4783 pre_expr_pool.release ();
4784 delete phi_translate_table;
4785 phi_translate_table = NULL;
4786 delete expression_to_id;
4787 expression_to_id = NULL;
4788 name_to_id.release ();
4790 free_aux_for_blocks ();
4793 namespace {
4795 const pass_data pass_data_pre =
4797 GIMPLE_PASS, /* type */
4798 "pre", /* name */
4799 OPTGROUP_NONE, /* optinfo_flags */
4800 TV_TREE_PRE, /* tv_id */
4801 /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
4802 pass_pre. */
4803 ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
4804 0, /* properties_provided */
4805 PROP_no_crit_edges, /* properties_destroyed */
4806 TODO_rebuild_alias, /* todo_flags_start */
4807 0, /* todo_flags_finish */
4810 class pass_pre : public gimple_opt_pass
4812 public:
4813 pass_pre (gcc::context *ctxt)
4814 : gimple_opt_pass (pass_data_pre, ctxt)
4817 /* opt_pass methods: */
4818 virtual bool gate (function *) { return flag_tree_pre != 0; }
4819 virtual unsigned int execute (function *);
4821 }; // class pass_pre
4823 unsigned int
4824 pass_pre::execute (function *fun)
4826 unsigned int todo = 0;
4828 do_partial_partial =
4829 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4831 /* This has to happen before SCCVN runs because
4832 loop_optimizer_init may create new phis, etc. */
4833 loop_optimizer_init (LOOPS_NORMAL);
4835 if (!run_scc_vn (VN_WALK))
4837 loop_optimizer_finalize ();
4838 return 0;
4841 init_pre ();
4842 scev_initialize ();
4844 /* Collect and value number expressions computed in each basic block. */
4845 compute_avail ();
4847 /* Insert can get quite slow on an incredibly large number of basic
4848 blocks due to some quadratic behavior. Until this behavior is
4849 fixed, don't run it when he have an incredibly large number of
4850 bb's. If we aren't going to run insert, there is no point in
4851 computing ANTIC, either, even though it's plenty fast. */
4852 if (n_basic_blocks_for_fn (fun) < 4000)
4854 compute_antic ();
4855 insert ();
4858 /* Make sure to remove fake edges before committing our inserts.
4859 This makes sure we don't end up with extra critical edges that
4860 we would need to split. */
4861 remove_fake_exit_edges ();
4862 gsi_commit_edge_inserts ();
4864 /* Eliminate folds statements which might (should not...) end up
4865 not keeping virtual operands up-to-date. */
4866 gcc_assert (!need_ssa_update_p (fun));
4868 /* Remove all the redundant expressions. */
4869 todo |= eliminate (true);
4871 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4872 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4873 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4874 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4876 clear_expression_ids ();
4877 remove_dead_inserted_code ();
4879 scev_finalize ();
4880 fini_pre ();
4881 todo |= fini_eliminate ();
4882 loop_optimizer_finalize ();
4884 /* Restore SSA info before tail-merging as that resets it as well. */
4885 scc_vn_restore_ssa_info ();
4887 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4888 case we can merge the block with the remaining predecessor of the block.
4889 It should either:
4890 - call merge_blocks after each tail merge iteration
4891 - call merge_blocks after all tail merge iterations
4892 - mark TODO_cleanup_cfg when necessary
4893 - share the cfg cleanup with fini_pre. */
4894 todo |= tail_merge_optimize (todo);
4896 free_scc_vn ();
4898 /* Tail merging invalidates the virtual SSA web, together with
4899 cfg-cleanup opportunities exposed by PRE this will wreck the
4900 SSA updating machinery. So make sure to run update-ssa
4901 manually, before eventually scheduling cfg-cleanup as part of
4902 the todo. */
4903 update_ssa (TODO_update_ssa_only_virtuals);
4905 return todo;
4908 } // anon namespace
4910 gimple_opt_pass *
4911 make_pass_pre (gcc::context *ctxt)
4913 return new pass_pre (ctxt);
4916 namespace {
4918 const pass_data pass_data_fre =
4920 GIMPLE_PASS, /* type */
4921 "fre", /* name */
4922 OPTGROUP_NONE, /* optinfo_flags */
4923 TV_TREE_FRE, /* tv_id */
4924 ( PROP_cfg | PROP_ssa ), /* properties_required */
4925 0, /* properties_provided */
4926 0, /* properties_destroyed */
4927 0, /* todo_flags_start */
4928 0, /* todo_flags_finish */
4931 class pass_fre : public gimple_opt_pass
4933 public:
4934 pass_fre (gcc::context *ctxt)
4935 : gimple_opt_pass (pass_data_fre, ctxt)
4938 /* opt_pass methods: */
4939 opt_pass * clone () { return new pass_fre (m_ctxt); }
4940 virtual bool gate (function *) { return flag_tree_fre != 0; }
4941 virtual unsigned int execute (function *);
4943 }; // class pass_fre
4945 unsigned int
4946 pass_fre::execute (function *fun)
4948 unsigned int todo = 0;
4950 if (!run_scc_vn (VN_WALKREWRITE))
4951 return 0;
4953 memset (&pre_stats, 0, sizeof (pre_stats));
4955 /* Remove all the redundant expressions. */
4956 todo |= eliminate (false);
4958 todo |= fini_eliminate ();
4960 scc_vn_restore_ssa_info ();
4961 free_scc_vn ();
4963 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4964 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4966 return todo;
4969 } // anon namespace
4971 gimple_opt_pass *
4972 make_pass_fre (gcc::context *ctxt)
4974 return new pass_fre (ctxt);