1 /* SSA-PRE for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-inline.h"
30 #include "gimple.h"
31 #include "gimplify.h"
32 #include "gimple-iterator.h"
33 #include "gimplify-me.h"
34 #include "gimple-ssa.h"
35 #include "tree-cfg.h"
36 #include "tree-phinodes.h"
37 #include "ssa-iterators.h"
38 #include "tree-ssanames.h"
39 #include "tree-ssa-loop.h"
40 #include "tree-into-ssa.h"
41 #include "tree-dfa.h"
42 #include "tree-ssa.h"
43 #include "hash-table.h"
44 #include "tree-iterator.h"
45 #include "alloc-pool.h"
46 #include "obstack.h"
47 #include "tree-pass.h"
48 #include "flags.h"
49 #include "langhooks.h"
50 #include "cfgloop.h"
51 #include "tree-ssa-sccvn.h"
52 #include "tree-scalar-evolution.h"
53 #include "params.h"
54 #include "dbgcnt.h"
55 #include "domwalk.h"
56 #include "ipa-prop.h"
57 #include "tree-ssa-propagate.h"
59 /* TODO:
61 1. Avail sets can be shared by making an avail_find_leader that
62 walks up the dominator tree and looks in those avail sets.
63 This might affect code optimality; it's unclear right now.
64 2. Strength reduction can be performed by anticipating expressions
65 we can repair later on.
66 3. We can do back-substitution or smarter value numbering to catch
67 commutative expressions split up over multiple statements.
70 /* For ease of terminology, "expression node" in the below refers to
71 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
72 represent the actual statement containing the expressions we care about,
73 and we cache the value number by putting it in the expression. */
75 /* Basic algorithm
77 First we walk the statements to generate the AVAIL sets, the
78 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
79 generation of values/expressions by a given block. We use them
80 when computing the ANTIC sets. The AVAIL sets consist of
81 SSA_NAME's that represent values, so we know what values are
82 available in what blocks. AVAIL is a forward dataflow problem. In
83 SSA, values are never killed, so we don't need a kill set, or a
84 fixpoint iteration, in order to calculate the AVAIL sets. In
85 traditional parlance, AVAIL sets tell us the downsafety of the
86 expressions/values.
88 Next, we generate the ANTIC sets. These sets represent the
89 anticipatable expressions. ANTIC is a backwards dataflow
90 problem. An expression is anticipatable in a given block if it could
91 be generated in that block. This means that if we had to perform
92 an insertion of the value of that expression in that block, we
93 could. Calculating the ANTIC sets requires phi translation of
94 expressions, because the flow goes backwards through phis. We must
95 iterate to a fixpoint of the ANTIC sets, because we have a kill
96 set. Even in SSA form, values are not live over the entire
97 function, only from their definition point onwards. So we have to
98 remove values from the ANTIC set once we go past the definition
99 point of the leaders that make them up.
100 compute_antic/compute_antic_aux performs this computation.
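      For example (illustrative): if a_1 + b_2 is anticipatable in a
      block and b_2 is defined by b_2 = PHI <b_5 (pred)>, phi
      translating the expression into pred yields a_1 + b_5; and once
      we move above the definition point of b_2, the value of
      a_1 + b_2 must be removed from the ANTIC set.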
102 Third, we perform insertions to make partially redundant
103 expressions fully redundant.
105 An expression is partially redundant (excluding partial
106 anticipation) if:
108 1. It is AVAIL in some, but not all, of the predecessors of a
109 given block.
110 2. It is ANTIC in all the predecessors.
112 In order to make it fully redundant, we insert the expression into
113 the predecessors where it is not available, but is ANTIC.
115 For the partial anticipation case, we only perform insertion if it
116 is partially anticipated in some block, and fully available in all
117 of the predecessors.
119 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
120 performs these steps.
122 Fourth, we eliminate fully redundant expressions.
123 This is a simple statement walk that replaces redundant
124 calculations with the now available values. */
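/* An illustrative example (hypothetical, simplified GIMPLE).  Before
   the pass, the then-arm and the join both compute a_2 + b_3:

      if (c_4) { x_1 = a_2 + b_3; }
      y_5 = a_2 + b_3;

   a_2 + b_3 is AVAIL in only one predecessor of the join but ANTIC in
   both, so insertion places t_6 = a_2 + b_3 on the other arm, creates
   p_7 = PHI <x_1, t_6>, and elimination rewrites the join statement
   to y_5 = p_7.  */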
126 /* Representations of value numbers:
128 Value numbers are represented by a representative SSA_NAME. We
129 will create fake SSA_NAME's in situations where we need a
130 representative but do not have one (because it is a complex
131 expression). In order to facilitate storing the value numbers in
132 bitmaps, and keep the number of wasted SSA_NAME's down, we also
133 associate a value_id with each value number, and create full blown
134 ssa_name's only where we actually need them (IE in operands of
135 existing expressions).
137 Theoretically you could replace all the value_id's with
138 SSA_NAME_VERSION, but this would allocate a large number of
139 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
140 It would also require an additional indirection at each point we
141 use the value id. */
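/* A hypothetical example: if a_2 = b_1 + 1 and c_4 = b_1 + 1 are
   value-numbered the same, both names share one value number, whose
   representative might be a_2 and whose value_id might be, say, 11;
   the bitmap sets below store the small integer 11 rather than either
   SSA_NAME.  */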
143 /* Representation of expressions on value numbers:
145 Expressions consisting of value numbers are represented the same
146 way as our VN internally represents them, with an additional
147 "pre_expr" wrapping around them in order to facilitate storing all
148 of the expressions in the same sets. */
150 /* Representation of sets:
152 The dataflow sets do not need to be sorted in any particular order
153 for the majority of their lifetime, and are simply represented as two
154 bitmaps, one that keeps track of values present in the set, and one
155 that keeps track of expressions present in the set.
157 When we need them in topological order, we produce it on demand by
158 transforming the bitmap into an array and sorting it into topo
159 order. */
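/* Continuing the hypothetical ids above: inserting an expression with
   expression id 7 and value_id 11 sets bit 7 in the expressions
   bitmap and bit 11 in the values bitmap, so value membership
   (bitmap_set_contains_value) is a single bit test independent of how
   many expressions carry that value.  */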
161 /* Type of expression, used to know which member of the PRE_EXPR union
162 is valid. */
164 enum pre_expr_kind
166 NAME,
167 NARY,
168 REFERENCE,
169 CONSTANT
172 typedef union pre_expr_union_d
174 tree name;
175 tree constant;
176 vn_nary_op_t nary;
177 vn_reference_t reference;
178 } pre_expr_union;
180 typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
182 enum pre_expr_kind kind;
183 unsigned int id;
184 pre_expr_union u;
186 /* hash_table support. */
187 typedef pre_expr_d value_type;
188 typedef pre_expr_d compare_type;
189 static inline hashval_t hash (const pre_expr_d *);
190 static inline int equal (const pre_expr_d *, const pre_expr_d *);
191 } *pre_expr;
193 #define PRE_EXPR_NAME(e) (e)->u.name
194 #define PRE_EXPR_NARY(e) (e)->u.nary
195 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
196 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
198 /* Compare E1 and E2 for equality. */
200 inline int
201 pre_expr_d::equal (const value_type *e1, const compare_type *e2)
203 if (e1->kind != e2->kind)
204 return false;
206 switch (e1->kind)
208 case CONSTANT:
209 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
210 PRE_EXPR_CONSTANT (e2));
211 case NAME:
212 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
213 case NARY:
214 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
215 case REFERENCE:
216 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
217 PRE_EXPR_REFERENCE (e2));
218 default:
219 gcc_unreachable ();
223 /* Hash E. */
225 inline hashval_t
226 pre_expr_d::hash (const value_type *e)
228 switch (e->kind)
230 case CONSTANT:
231 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
232 case NAME:
233 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
234 case NARY:
235 return PRE_EXPR_NARY (e)->hashcode;
236 case REFERENCE:
237 return PRE_EXPR_REFERENCE (e)->hashcode;
238 default:
239 gcc_unreachable ();
243 /* Next global expression id number. */
244 static unsigned int next_expression_id;
246 /* Mapping from expression to id number we can use in bitmap sets. */
247 static vec<pre_expr> expressions;
248 static hash_table <pre_expr_d> expression_to_id;
249 static vec<unsigned> name_to_id;
251 /* Allocate an expression id for EXPR. */
253 static inline unsigned int
254 alloc_expression_id (pre_expr expr)
256 struct pre_expr_d **slot;
257 /* Make sure we won't overflow. */
258 gcc_assert (next_expression_id + 1 > next_expression_id);
259 expr->id = next_expression_id++;
260 expressions.safe_push (expr);
261 if (expr->kind == NAME)
263 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
264 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
265 re-allocations by using vec::reserve upfront. There is no
266 vec::quick_grow_cleared unfortunately. */
267 unsigned old_len = name_to_id.length ();
268 name_to_id.reserve (num_ssa_names - old_len);
269 name_to_id.safe_grow_cleared (num_ssa_names);
270 gcc_assert (name_to_id[version] == 0);
271 name_to_id[version] = expr->id;
273 else
275 slot = expression_to_id.find_slot (expr, INSERT);
276 gcc_assert (!*slot);
277 *slot = expr;
279 return next_expression_id - 1;
282 /* Return the expression id for tree EXPR. */
284 static inline unsigned int
285 get_expression_id (const pre_expr expr)
287 return expr->id;
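/* Return the expression id assigned to EXPR, or 0 if EXPR has not
   been assigned one yet.  */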
290 static inline unsigned int
291 lookup_expression_id (const pre_expr expr)
293 struct pre_expr_d **slot;
295 if (expr->kind == NAME)
297 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
298 if (name_to_id.length () <= version)
299 return 0;
300 return name_to_id[version];
302 else
304 slot = expression_to_id.find_slot (expr, NO_INSERT);
305 if (!slot)
306 return 0;
307 return ((pre_expr)*slot)->id;
311 /* Return the existing expression id for EXPR, or create one if one
312 does not exist yet. */
314 static inline unsigned int
315 get_or_alloc_expression_id (pre_expr expr)
317 unsigned int id = lookup_expression_id (expr);
318 if (id == 0)
319 return alloc_expression_id (expr);
320 return expr->id = id;
323 /* Return the expression that has expression id ID. */
325 static inline pre_expr
326 expression_for_id (unsigned int id)
328 return expressions[id];
331 /* Free the expression id field in all of our expressions,
332 and then destroy the expressions array. */
334 static void
335 clear_expression_ids (void)
337 expressions.release ();
340 static alloc_pool pre_expr_pool;
342 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
344 static pre_expr
345 get_or_alloc_expr_for_name (tree name)
347 struct pre_expr_d expr;
348 pre_expr result;
349 unsigned int result_id;
351 expr.kind = NAME;
352 expr.id = 0;
353 PRE_EXPR_NAME (&expr) = name;
354 result_id = lookup_expression_id (&expr);
355 if (result_id != 0)
356 return expression_for_id (result_id);
358 result = (pre_expr) pool_alloc (pre_expr_pool);
359 result->kind = NAME;
360 PRE_EXPR_NAME (result) = name;
361 alloc_expression_id (result);
362 return result;
365 /* An unordered bitmap set. One bitmap tracks values, the other,
366 expressions. */
367 typedef struct bitmap_set
369 bitmap_head expressions;
370 bitmap_head values;
371 } *bitmap_set_t;
373 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
374 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
376 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
377 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
379 /* Mapping from value id to expressions with that value_id. */
380 static vec<bitmap> value_expressions;
382 /* Sets that we need to keep track of. */
383 typedef struct bb_bitmap_sets
385 /* The EXP_GEN set, which represents expressions/values generated in
386 a basic block. */
387 bitmap_set_t exp_gen;
389 /* The PHI_GEN set, which represents PHI results generated in a
390 basic block. */
391 bitmap_set_t phi_gen;
393 /* The TMP_GEN set, which represents results/temporaries generated
394 in a basic block. IE the LHS of an expression. */
395 bitmap_set_t tmp_gen;
397 /* The AVAIL_OUT set, which represents which values are available in
398 a given basic block. */
399 bitmap_set_t avail_out;
401 /* The ANTIC_IN set, which represents which values are anticipatable
402 in a given basic block. */
403 bitmap_set_t antic_in;
405 /* The PA_IN set, which represents which values are
406 partially anticipatable in a given basic block. */
407 bitmap_set_t pa_in;
409 /* The NEW_SETS set, which is used during insertion to augment the
410 AVAIL_OUT set of blocks with the new insertions performed during
411 the current iteration. */
412 bitmap_set_t new_sets;
414 /* A cache for value_dies_in_block_x. */
415 bitmap expr_dies;
417 /* True if we have visited this block during ANTIC calculation. */
418 unsigned int visited : 1;
420 /* True if we have deferred processing this block during ANTIC
421 calculation until its successor is processed. */
422 unsigned int deferred : 1;
424 /* True when the block contains a call that might not return. */
425 unsigned int contains_may_not_return_call : 1;
426 } *bb_value_sets_t;
428 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
429 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
430 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
431 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
432 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
433 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
434 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
435 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
436 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
437 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
438 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
441 /* Basic block list in postorder. */
442 static int *postorder;
443 static int postorder_num;
445 /* This structure is used to keep track of statistics on what
446 optimization PRE was able to perform. */
447 static struct
449 /* The number of RHS computations eliminated by PRE. */
450 int eliminations;
452 /* The number of new expressions/temporaries generated by PRE. */
453 int insertions;
455 /* The number of inserts found due to partial anticipation. */
456 int pa_insert;
458 /* The number of new PHI nodes added by PRE. */
459 int phis;
460 } pre_stats;
462 static bool do_partial_partial;
463 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
464 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
465 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
466 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
467 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
468 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
469 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
470 unsigned int, bool);
471 static bitmap_set_t bitmap_set_new (void);
472 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
473 tree);
474 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
475 static unsigned int get_expr_value_id (pre_expr);
477 /* We can add and remove elements and entries to and from sets
478 and hash tables, so we use alloc pools for them. */
480 static alloc_pool bitmap_set_pool;
481 static bitmap_obstack grand_bitmap_obstack;
483 /* Set of blocks with statements that have had their EH properties changed. */
484 static bitmap need_eh_cleanup;
486 /* Set of blocks with statements that have had their AB properties changed. */
487 static bitmap need_ab_cleanup;
489 /* A three tuple {e, pred, v} used to cache phi translations in the
490 phi_translate_table. */
492 typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
494 /* The expression. */
495 pre_expr e;
497 /* The predecessor block along which we translated the expression. */
498 basic_block pred;
500 /* The value that resulted from the translation. */
501 pre_expr v;
503 /* The hashcode for the expression, pred pair. This is cached for
504 speed reasons. */
505 hashval_t hashcode;
507 /* hash_table support. */
508 typedef expr_pred_trans_d value_type;
509 typedef expr_pred_trans_d compare_type;
510 static inline hashval_t hash (const value_type *);
511 static inline int equal (const value_type *, const compare_type *);
512 } *expr_pred_trans_t;
513 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
515 inline hashval_t
516 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
518 return e->hashcode;
521 inline int
522 expr_pred_trans_d::equal (const value_type *ve1,
523 const compare_type *ve2)
525 basic_block b1 = ve1->pred;
526 basic_block b2 = ve2->pred;
528 /* If they are not translations for the same basic block, they can't
529 be equal. */
530 if (b1 != b2)
531 return false;
532 return pre_expr_d::equal (ve1->e, ve2->e);
535 /* The phi_translate_table caches phi translations for a given
536 expression and predecessor. */
537 static hash_table <expr_pred_trans_d> phi_translate_table;
539 /* Add the tuple mapping from {expression E, basic block PRED} to
540 the phi translation table and return whether it pre-existed. */
542 static inline bool
543 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
545 expr_pred_trans_t *slot;
546 expr_pred_trans_d tem;
547 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
548 pred->index);
549 tem.e = e;
550 tem.pred = pred;
551 tem.hashcode = hash;
552 slot = phi_translate_table.find_slot_with_hash (&tem, hash, INSERT);
553 if (*slot)
555 *entry = *slot;
556 return true;
559 *entry = *slot = XNEW (struct expr_pred_trans_d);
560 (*entry)->e = e;
561 (*entry)->pred = pred;
562 (*entry)->hashcode = hash;
563 return false;
567 /* Add expression E to the expression set of value id V. */
569 static void
570 add_to_value (unsigned int v, pre_expr e)
572 bitmap set;
574 gcc_checking_assert (get_expr_value_id (e) == v);
576 if (v >= value_expressions.length ())
578 value_expressions.safe_grow_cleared (v + 1);
581 set = value_expressions[v];
582 if (!set)
584 set = BITMAP_ALLOC (&grand_bitmap_obstack);
585 value_expressions[v] = set;
588 bitmap_set_bit (set, get_or_alloc_expression_id (e));
591 /* Create a new bitmap set and return it. */
593 static bitmap_set_t
594 bitmap_set_new (void)
596 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
597 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
598 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
599 return ret;
602 /* Return the value id for a PRE expression EXPR. */
604 static unsigned int
605 get_expr_value_id (pre_expr expr)
607 unsigned int id;
608 switch (expr->kind)
610 case CONSTANT:
611 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
612 break;
613 case NAME:
614 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
615 break;
616 case NARY:
617 id = PRE_EXPR_NARY (expr)->value_id;
618 break;
619 case REFERENCE:
620 id = PRE_EXPR_REFERENCE (expr)->value_id;
621 break;
622 default:
623 gcc_unreachable ();
625 /* ??? We cannot assert that expr has a value-id (it can be 0), because
626 we assign value-ids only to expressions that have a result
627 in set_hashtable_value_ids. */
628 return id;
631 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
633 static tree
634 sccvn_valnum_from_value_id (unsigned int val)
636 bitmap_iterator bi;
637 unsigned int i;
638 bitmap exprset = value_expressions[val];
639 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
641 pre_expr vexpr = expression_for_id (i);
642 if (vexpr->kind == NAME)
643 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
644 else if (vexpr->kind == CONSTANT)
645 return PRE_EXPR_CONSTANT (vexpr);
647 return NULL_TREE;
650 /* Remove an expression EXPR from a bitmapped set. */
652 static void
653 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
655 unsigned int val = get_expr_value_id (expr);
656 if (!value_id_constant_p (val))
658 bitmap_clear_bit (&set->values, val);
659 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
663 static void
664 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
665 unsigned int val, bool allow_constants)
667 if (allow_constants || !value_id_constant_p (val))
669 /* We specifically expect this and only this function to be able to
670 insert constants into a set. */
671 bitmap_set_bit (&set->values, val);
672 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
676 /* Insert an expression EXPR into a bitmapped set. */
678 static void
679 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
681 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
684 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
686 static void
687 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
689 bitmap_copy (&dest->expressions, &orig->expressions);
690 bitmap_copy (&dest->values, &orig->values);
694 /* Free memory used up by SET. */
695 static void
696 bitmap_set_free (bitmap_set_t set)
698 bitmap_clear (&set->expressions);
699 bitmap_clear (&set->values);
703 /* Generate a topologically-ordered array of the expressions in SET. */
705 static vec<pre_expr>
706 sorted_array_from_bitmap_set (bitmap_set_t set)
708 unsigned int i, j;
709 bitmap_iterator bi, bj;
710 vec<pre_expr> result;
712 /* Pre-allocate roughly enough space for the array. */
713 result.create (bitmap_count_bits (&set->values));
715 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
717 /* The number of expressions having a given value is usually
718 relatively small. Thus, rather than making a vector of all
719 the expressions and sorting it by value-id, we walk the values
720 and check in the reverse mapping that tells us what expressions
721 have a given value, to filter those in our set. As a result,
722 the expressions are inserted in value-id order, which means
723 topological order.
725 If this is somehow a significant loss for some cases, we can
726 choose which set to walk based on the set size. */
727 bitmap exprset = value_expressions[i];
728 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
730 if (bitmap_bit_p (&set->expressions, j))
731 result.safe_push (expression_for_id (j));
735 return result;
738 /* Perform bitmapped set operation DEST &= ORIG. */
740 static void
741 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
743 bitmap_iterator bi;
744 unsigned int i;
746 if (dest != orig)
748 bitmap_head temp;
749 bitmap_initialize (&temp, &grand_bitmap_obstack);
751 bitmap_and_into (&dest->values, &orig->values);
752 bitmap_copy (&temp, &dest->expressions);
753 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
755 pre_expr expr = expression_for_id (i);
756 unsigned int value_id = get_expr_value_id (expr);
757 if (!bitmap_bit_p (&dest->values, value_id))
758 bitmap_clear_bit (&dest->expressions, i);
760 bitmap_clear (&temp);
764 /* Subtract all values and expressions contained in ORIG from DEST. */
766 static bitmap_set_t
767 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
769 bitmap_set_t result = bitmap_set_new ();
770 bitmap_iterator bi;
771 unsigned int i;
773 bitmap_and_compl (&result->expressions, &dest->expressions,
774 &orig->expressions);
776 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
778 pre_expr expr = expression_for_id (i);
779 unsigned int value_id = get_expr_value_id (expr);
780 bitmap_set_bit (&result->values, value_id);
783 return result;
786 /* Subtract all the values in bitmap set B from bitmap set A. */
788 static void
789 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
791 unsigned int i;
792 bitmap_iterator bi;
793 bitmap_head temp;
795 bitmap_initialize (&temp, &grand_bitmap_obstack);
797 bitmap_copy (&temp, &a->expressions);
798 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
800 pre_expr expr = expression_for_id (i);
801 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
802 bitmap_remove_from_set (a, expr);
804 bitmap_clear (&temp);
808 /* Return true if bitmapped set SET contains the value VALUE_ID. */
810 static bool
811 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
813 if (value_id_constant_p (value_id))
814 return true;
816 if (!set || bitmap_empty_p (&set->expressions))
817 return false;
819 return bitmap_bit_p (&set->values, value_id);
822 static inline bool
823 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
825 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
828 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
830 static void
831 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
832 const pre_expr expr)
834 bitmap exprset;
835 unsigned int i;
836 bitmap_iterator bi;
838 if (value_id_constant_p (lookfor))
839 return;
841 if (!bitmap_set_contains_value (set, lookfor))
842 return;
844 /* The number of expressions having a given value is usually
845 significantly less than the total number of expressions in SET.
846 Thus, rather than check, for each expression in SET, whether it
847 has the value LOOKFOR, we walk the reverse mapping that tells us
848 what expressions have a given value, and see if any of those
849 expressions are in our set. For large testcases, this is about
850 5-10x faster than walking the bitmap. If this is somehow a
851 significant loss for some cases, we can choose which set to walk
852 based on the set size. */
853 exprset = value_expressions[lookfor];
854 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
856 if (bitmap_clear_bit (&set->expressions, i))
858 bitmap_set_bit (&set->expressions, get_expression_id (expr));
859 return;
863 gcc_unreachable ();
866 /* Return true if two bitmap sets are equal. */
868 static bool
869 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
871 return bitmap_equal_p (&a->values, &b->values);
874 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
875 and add it otherwise. */
877 static void
878 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
880 unsigned int val = get_expr_value_id (expr);
882 if (bitmap_set_contains_value (set, val))
883 bitmap_set_replace_value (set, val, expr);
884 else
885 bitmap_insert_into_set (set, expr);
888 /* Insert EXPR into SET if EXPR's value is not already present in
889 SET. */
891 static void
892 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
894 unsigned int val = get_expr_value_id (expr);
896 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
898 /* Constant values are always considered to be part of the set. */
899 if (value_id_constant_p (val))
900 return;
902 /* If the value membership changed, add the expression. */
903 if (bitmap_set_bit (&set->values, val))
904 bitmap_set_bit (&set->expressions, expr->id);
907 /* Print out EXPR to outfile. */
909 static void
910 print_pre_expr (FILE *outfile, const pre_expr expr)
912 switch (expr->kind)
914 case CONSTANT:
915 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
916 break;
917 case NAME:
918 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
919 break;
920 case NARY:
922 unsigned int i;
923 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
924 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
925 for (i = 0; i < nary->length; i++)
927 print_generic_expr (outfile, nary->op[i], 0);
928 if (i != (unsigned) nary->length - 1)
929 fprintf (outfile, ",");
931 fprintf (outfile, "}");
933 break;
935 case REFERENCE:
937 vn_reference_op_t vro;
938 unsigned int i;
939 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
940 fprintf (outfile, "{");
941 for (i = 0;
942 ref->operands.iterate (i, &vro);
943 i++)
945 bool closebrace = false;
946 if (vro->opcode != SSA_NAME
947 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
949 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
950 if (vro->op0)
952 fprintf (outfile, "<");
953 closebrace = true;
956 if (vro->op0)
958 print_generic_expr (outfile, vro->op0, 0);
959 if (vro->op1)
961 fprintf (outfile, ",");
962 print_generic_expr (outfile, vro->op1, 0);
964 if (vro->op2)
966 fprintf (outfile, ",");
967 print_generic_expr (outfile, vro->op2, 0);
970 if (closebrace)
971 fprintf (outfile, ">");
972 if (i != ref->operands.length () - 1)
973 fprintf (outfile, ",");
975 fprintf (outfile, "}");
976 if (ref->vuse)
978 fprintf (outfile, "@");
979 print_generic_expr (outfile, ref->vuse, 0);
982 break;
985 void debug_pre_expr (pre_expr);
987 /* Like print_pre_expr but always prints to stderr. */
988 DEBUG_FUNCTION void
989 debug_pre_expr (pre_expr e)
991 print_pre_expr (stderr, e);
992 fprintf (stderr, "\n");
995 /* Print out SET to OUTFILE. */
997 static void
998 print_bitmap_set (FILE *outfile, bitmap_set_t set,
999 const char *setname, int blockindex)
1001 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1002 if (set)
1004 bool first = true;
1005 unsigned i;
1006 bitmap_iterator bi;
1008 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1010 const pre_expr expr = expression_for_id (i);
1012 if (!first)
1013 fprintf (outfile, ", ");
1014 first = false;
1015 print_pre_expr (outfile, expr);
1017 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1020 fprintf (outfile, " }\n");
1023 void debug_bitmap_set (bitmap_set_t);
1025 DEBUG_FUNCTION void
1026 debug_bitmap_set (bitmap_set_t set)
1028 print_bitmap_set (stderr, set, "debug", 0);
1031 void debug_bitmap_sets_for (basic_block);
1033 DEBUG_FUNCTION void
1034 debug_bitmap_sets_for (basic_block bb)
1036 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1037 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1038 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1039 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1040 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1041 if (do_partial_partial)
1042 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1043 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1046 /* Print out the expressions that have VAL to OUTFILE. */
1048 static void
1049 print_value_expressions (FILE *outfile, unsigned int val)
1051 bitmap set = value_expressions[val];
1052 if (set)
1054 bitmap_set x;
1055 char s[10];
1056 sprintf (s, "%04d", val);
1057 x.expressions = *set;
1058 print_bitmap_set (outfile, &x, s, 0);
1063 DEBUG_FUNCTION void
1064 debug_value_expressions (unsigned int val)
1066 print_value_expressions (stderr, val);
1069 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1070 represent it. */
1072 static pre_expr
1073 get_or_alloc_expr_for_constant (tree constant)
1075 unsigned int result_id;
1076 unsigned int value_id;
1077 struct pre_expr_d expr;
1078 pre_expr newexpr;
1080 expr.kind = CONSTANT;
1081 PRE_EXPR_CONSTANT (&expr) = constant;
1082 result_id = lookup_expression_id (&expr);
1083 if (result_id != 0)
1084 return expression_for_id (result_id);
1086 newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1087 newexpr->kind = CONSTANT;
1088 PRE_EXPR_CONSTANT (newexpr) = constant;
1089 alloc_expression_id (newexpr);
1090 value_id = get_or_alloc_constant_value_id (constant);
1091 add_to_value (value_id, newexpr);
1092 return newexpr;
1095 /* Given a value id V, find the actual tree representing the constant
1096 value if there is one, and return it. Return NULL if we can't find
1097 a constant. */
1099 static tree
1100 get_constant_for_value_id (unsigned int v)
1102 if (value_id_constant_p (v))
1104 unsigned int i;
1105 bitmap_iterator bi;
1106 bitmap exprset = value_expressions[v];
1108 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1110 pre_expr expr = expression_for_id (i);
1111 if (expr->kind == CONSTANT)
1112 return PRE_EXPR_CONSTANT (expr);
1115 return NULL;
1118 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1119 Currently only supports constants and SSA_NAMES. */
1120 static pre_expr
1121 get_or_alloc_expr_for (tree t)
1123 if (TREE_CODE (t) == SSA_NAME)
1124 return get_or_alloc_expr_for_name (t);
1125 else if (is_gimple_min_invariant (t))
1126 return get_or_alloc_expr_for_constant (t);
1127 else
1129 /* More complex expressions can result from SCCVN expression
1130 simplification that inserts values for them. As none of them
1131 have VOPs, they get handled by the nary ops struct. */
1132 vn_nary_op_t result;
1133 unsigned int result_id;
1134 vn_nary_op_lookup (t, &result);
1135 if (result != NULL)
1137 pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
1138 e->kind = NARY;
1139 PRE_EXPR_NARY (e) = result;
1140 result_id = lookup_expression_id (e);
1141 if (result_id != 0)
1143 pool_free (pre_expr_pool, e);
1144 e = expression_for_id (result_id);
1145 return e;
1147 alloc_expression_id (e);
1148 return e;
1151 return NULL;
1154 /* Return the folded version of T if T, when folded, is a gimple
1155 min_invariant. Otherwise, return T. */
1157 static pre_expr
1158 fully_constant_expression (pre_expr e)
1160 switch (e->kind)
1162 case CONSTANT:
1163 return e;
1164 case NARY:
1166 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1167 switch (TREE_CODE_CLASS (nary->opcode))
1169 case tcc_binary:
1170 case tcc_comparison:
1172 /* We have to go from trees to pre exprs to value ids to
1173 constants. */
1174 tree naryop0 = nary->op[0];
1175 tree naryop1 = nary->op[1];
1176 tree result;
1177 if (!is_gimple_min_invariant (naryop0))
1179 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1180 unsigned int vrep0 = get_expr_value_id (rep0);
1181 tree const0 = get_constant_for_value_id (vrep0);
1182 if (const0)
1183 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1185 if (!is_gimple_min_invariant (naryop1))
1187 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1188 unsigned int vrep1 = get_expr_value_id (rep1);
1189 tree const1 = get_constant_for_value_id (vrep1);
1190 if (const1)
1191 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1193 result = fold_binary (nary->opcode, nary->type,
1194 naryop0, naryop1);
1195 if (result && is_gimple_min_invariant (result))
1196 return get_or_alloc_expr_for_constant (result);
1197 /* We might have simplified the expression to a
1198 SSA_NAME for example from x_1 * 1. But we cannot
1199 insert a PHI for x_1 unconditionally as x_1 might
1200 not be available readily. */
1201 return e;
1203 case tcc_reference:
1204 if (nary->opcode != REALPART_EXPR
1205 && nary->opcode != IMAGPART_EXPR
1206 && nary->opcode != VIEW_CONVERT_EXPR)
1207 return e;
1208 /* Fallthrough. */
1209 case tcc_unary:
1211 /* We have to go from trees to pre exprs to value ids to
1212 constants. */
1213 tree naryop0 = nary->op[0];
1214 tree const0, result;
1215 if (is_gimple_min_invariant (naryop0))
1216 const0 = naryop0;
1217 else
1219 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1220 unsigned int vrep0 = get_expr_value_id (rep0);
1221 const0 = get_constant_for_value_id (vrep0);
1223 result = NULL;
1224 if (const0)
1226 tree type1 = TREE_TYPE (nary->op[0]);
1227 const0 = fold_convert (type1, const0);
1228 result = fold_unary (nary->opcode, nary->type, const0);
1230 if (result && is_gimple_min_invariant (result))
1231 return get_or_alloc_expr_for_constant (result);
1232 return e;
1234 default:
1235 return e;
1238 case REFERENCE:
1240 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1241 tree folded;
1242 if ((folded = fully_constant_vn_reference_p (ref)))
1243 return get_or_alloc_expr_for_constant (folded);
1244 return e;
1246 default:
1247 return e;
1249 return e;
1252 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1253 it has the value it would have in BLOCK. Set *SAME_VALID to true
1254 in case the new vuse doesn't change the value id of the OPERANDS. */
1256 static tree
1257 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1258 alias_set_type set, tree type, tree vuse,
1259 basic_block phiblock,
1260 basic_block block, bool *same_valid)
1262 gimple phi = SSA_NAME_DEF_STMT (vuse);
1263 ao_ref ref;
1264 edge e = NULL;
1265 bool use_oracle;
1267 *same_valid = true;
1269 if (gimple_bb (phi) != phiblock)
1270 return vuse;
1272 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1274 /* Use the alias-oracle to find either the PHI node in this block,
1275 the first VUSE used in this block that is equivalent to vuse, or
1276 the first VUSE whose definition in this block kills the value. */
1277 if (gimple_code (phi) == GIMPLE_PHI)
1278 e = find_edge (block, phiblock);
1279 else if (use_oracle)
1280 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1282 vuse = gimple_vuse (phi);
1283 phi = SSA_NAME_DEF_STMT (vuse);
1284 if (gimple_bb (phi) != phiblock)
1285 return vuse;
1286 if (gimple_code (phi) == GIMPLE_PHI)
1288 e = find_edge (block, phiblock);
1289 break;
1292 else
1293 return NULL_TREE;
1295 if (e)
1297 if (use_oracle)
1299 bitmap visited = NULL;
1300 unsigned int cnt;
1301 /* Try to find a vuse that dominates this phi node by skipping
1302 non-clobbering statements. */
1303 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false);
1304 if (visited)
1305 BITMAP_FREE (visited);
1307 else
1308 vuse = NULL_TREE;
1309 if (!vuse)
1311 /* If we didn't find any, the value ID can't stay the same,
1312 but return the translated vuse. */
1313 *same_valid = false;
1314 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1316 /* ??? We would like to return vuse here as this is the canonical
1317 upmost vdef that this reference is associated with. But during
1318 insertion of the references into the hash tables we only ever
1319 directly insert with their direct gimple_vuse, hence returning
1320 something else would make us not find the other expression. */
1321 return PHI_ARG_DEF (phi, e->dest_idx);
1324 return NULL_TREE;
1327 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1328 SET2. This is used to avoid making a set consisting of the union
1329 of PA_IN and ANTIC_IN during insert. */
1331 static inline pre_expr
1332 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1334 pre_expr result;
1336 result = bitmap_find_leader (set1, val);
1337 if (!result && set2)
1338 result = bitmap_find_leader (set2, val);
1339 return result;
1342 /* Get the tree type for our PRE expression e. */
1344 static tree
1345 get_expr_type (const pre_expr e)
1347 switch (e->kind)
1349 case NAME:
1350 return TREE_TYPE (PRE_EXPR_NAME (e));
1351 case CONSTANT:
1352 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1353 case REFERENCE:
1354 return PRE_EXPR_REFERENCE (e)->type;
1355 case NARY:
1356 return PRE_EXPR_NARY (e)->type;
1358 gcc_unreachable ();
1361 /* Get a representative SSA_NAME for a given expression.
1362 Since all of our sub-expressions are treated as values, we require
1363 them to be SSA_NAME's for simplicity.
1364 Prior versions of GVNPRE used to use "value handles" here, so that
1365 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1366 either case, the operands are really values (IE we do not expect
1367 them to be usable without finding leaders). */
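/* For instance (hypothetical value set): if the expressions for a
   value are { b_1 + 1 (NARY), a_2 (NAME) }, a_2 is returned; if only
   the NARY exists, a fresh "pretmp" SSA name is created below to
   stand for the value.  */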
1369 static tree
1370 get_representative_for (const pre_expr e)
1372 tree name;
1373 unsigned int value_id = get_expr_value_id (e);
1375 switch (e->kind)
1377 case NAME:
1378 return PRE_EXPR_NAME (e);
1379 case CONSTANT:
1380 return PRE_EXPR_CONSTANT (e);
1381 case NARY:
1382 case REFERENCE:
1384 /* Go through all of the expressions representing this value
1385 and pick out an SSA_NAME. */
1386 unsigned int i;
1387 bitmap_iterator bi;
1388 bitmap exprs = value_expressions[value_id];
1389 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1391 pre_expr rep = expression_for_id (i);
1392 if (rep->kind == NAME)
1393 return PRE_EXPR_NAME (rep);
1394 else if (rep->kind == CONSTANT)
1395 return PRE_EXPR_CONSTANT (rep);
1398 break;
1401 /* If we reached here we couldn't find an SSA_NAME. This can
1402 happen when we've discovered a value that has never appeared in
1403 the program as set to an SSA_NAME, as the result of phi translation.
1404 Create one here.
1405 ??? We should be able to re-use this when we insert the statement
1406 to compute it. */
1407 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1408 VN_INFO_GET (name)->value_id = value_id;
1409 VN_INFO (name)->valnum = name;
1410 /* ??? For now mark this SSA name for release by SCCVN. */
1411 VN_INFO (name)->needs_insertion = true;
1412 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1413 if (dump_file && (dump_flags & TDF_DETAILS))
1415 fprintf (dump_file, "Created SSA_NAME representative ");
1416 print_generic_expr (dump_file, name, 0);
1417 fprintf (dump_file, " for expression:");
1418 print_pre_expr (dump_file, e);
1419 fprintf (dump_file, " (%04d)\n", value_id);
1422 return name;
1427 static pre_expr
1428 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1429 basic_block pred, basic_block phiblock);
1431 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1432 the phis in PRED. Return NULL if we can't find a leader for each part
1433 of the translated expression. */
1435 static pre_expr
1436 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1437 basic_block pred, basic_block phiblock)
1439 switch (expr->kind)
1441 case NARY:
1443 unsigned int i;
1444 bool changed = false;
1445 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1446 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1447 sizeof_vn_nary_op (nary->length));
1448 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1450 for (i = 0; i < newnary->length; i++)
1452 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1453 continue;
1454 else
1456 pre_expr leader, result;
1457 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1458 leader = find_leader_in_sets (op_val_id, set1, set2);
1459 result = phi_translate (leader, set1, set2, pred, phiblock);
1460 if (result && result != leader)
1462 tree name = get_representative_for (result);
1463 if (!name)
1464 return NULL;
1465 newnary->op[i] = name;
1467 else if (!result)
1468 return NULL;
1470 changed |= newnary->op[i] != nary->op[i];
1473 if (changed)
1475 pre_expr constant;
1476 unsigned int new_val_id;
1478 tree result = vn_nary_op_lookup_pieces (newnary->length,
1479 newnary->opcode,
1480 newnary->type,
1481 &newnary->op[0],
1482 &nary);
1483 if (result && is_gimple_min_invariant (result))
1484 return get_or_alloc_expr_for_constant (result);
1486 expr = (pre_expr) pool_alloc (pre_expr_pool);
1487 expr->kind = NARY;
1488 expr->id = 0;
1489 if (nary)
1491 PRE_EXPR_NARY (expr) = nary;
1492 constant = fully_constant_expression (expr);
1493 if (constant != expr)
1494 return constant;
1496 new_val_id = nary->value_id;
1497 get_or_alloc_expression_id (expr);
1499 else
1501 new_val_id = get_next_value_id ();
1502 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1503 nary = vn_nary_op_insert_pieces (newnary->length,
1504 newnary->opcode,
1505 newnary->type,
1506 &newnary->op[0],
1507 result, new_val_id);
1508 PRE_EXPR_NARY (expr) = nary;
1509 constant = fully_constant_expression (expr);
1510 if (constant != expr)
1511 return constant;
1512 get_or_alloc_expression_id (expr);
1514 add_to_value (new_val_id, expr);
1516 return expr;
1518 break;
1520 case REFERENCE:
1522 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1523 vec<vn_reference_op_s> operands = ref->operands;
1524 tree vuse = ref->vuse;
1525 tree newvuse = vuse;
1526 vec<vn_reference_op_s> newoperands = vNULL;
1527 bool changed = false, same_valid = true;
1528 unsigned int i, j, n;
1529 vn_reference_op_t operand;
1530 vn_reference_t newref;
1532 for (i = 0, j = 0;
1533 operands.iterate (i, &operand); i++, j++)
1535 pre_expr opresult;
1536 pre_expr leader;
1537 tree op[3];
1538 tree type = operand->type;
1539 vn_reference_op_s newop = *operand;
1540 op[0] = operand->op0;
1541 op[1] = operand->op1;
1542 op[2] = operand->op2;
1543 for (n = 0; n < 3; ++n)
1545 unsigned int op_val_id;
1546 if (!op[n])
1547 continue;
1548 if (TREE_CODE (op[n]) != SSA_NAME)
1550 /* We can't possibly insert these. */
1551 if (n != 0
1552 && !is_gimple_min_invariant (op[n]))
1553 break;
1554 continue;
1556 op_val_id = VN_INFO (op[n])->value_id;
1557 leader = find_leader_in_sets (op_val_id, set1, set2);
1558 if (!leader)
1559 break;
1560 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1561 if (!opresult)
1562 break;
1563 if (opresult != leader)
1565 tree name = get_representative_for (opresult);
1566 if (!name)
1567 break;
1568 changed |= name != op[n];
1569 op[n] = name;
1572 if (n != 3)
1574 newoperands.release ();
1575 return NULL;
1577 if (!newoperands.exists ())
1578 newoperands = operands.copy ();
1579 /* We may have changed from an SSA_NAME to a constant. */
1580 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1581 newop.opcode = TREE_CODE (op[0]);
1582 newop.type = type;
1583 newop.op0 = op[0];
1584 newop.op1 = op[1];
1585 newop.op2 = op[2];
1586 /* If it transforms a non-constant ARRAY_REF into a constant
1587 one, adjust the constant offset. */
1588 if (newop.opcode == ARRAY_REF
1589 && newop.off == -1
1590 && TREE_CODE (op[0]) == INTEGER_CST
1591 && TREE_CODE (op[1]) == INTEGER_CST
1592 && TREE_CODE (op[2]) == INTEGER_CST)
1594 double_int off = tree_to_double_int (op[0]);
1595 off += -tree_to_double_int (op[1]);
1596 off *= tree_to_double_int (op[2]);
1597 if (off.fits_shwi ())
1598 newop.off = off.low;
1600 newoperands[j] = newop;
1601 /* If it transforms from an SSA_NAME to an address, fold with
1602 a preceding indirect reference. */
1603 if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
1604 && newoperands[j - 1].opcode == MEM_REF)
1605 vn_reference_fold_indirect (&newoperands, &j);
1607 if (i != operands.length ())
1609 newoperands.release ();
1610 return NULL;
1613 if (vuse)
1615 newvuse = translate_vuse_through_block (newoperands,
1616 ref->set, ref->type,
1617 vuse, phiblock, pred,
1618 &same_valid);
1619 if (newvuse == NULL_TREE)
1621 newoperands.release ();
1622 return NULL;
1626 if (changed || newvuse != vuse)
1628 unsigned int new_val_id;
1629 pre_expr constant;
1631 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1632 ref->type,
1633 newoperands,
1634 &newref, VN_WALK);
1635 if (result)
1636 newoperands.release ();
1638 /* We can always insert constants, so if we have a partially
1639 redundant constant load of another type, try to translate it
1640 to a constant of appropriate type. */
1641 if (result && is_gimple_min_invariant (result))
1643 tree tem = result;
1644 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1646 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1647 if (tem && !is_gimple_min_invariant (tem))
1648 tem = NULL_TREE;
1650 if (tem)
1651 return get_or_alloc_expr_for_constant (tem);
1654 /* If we'd have to convert things we would need to validate
1655 if we can insert the translated expression. So fail
1656 here for now - we cannot insert an alias with a different
1657 type in the VN tables either, as that would assert. */
1658 if (result
1659 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1660 return NULL;
1661 else if (!result && newref
1662 && !useless_type_conversion_p (ref->type, newref->type))
1664 newoperands.release ();
1665 return NULL;
1668 expr = (pre_expr) pool_alloc (pre_expr_pool);
1669 expr->kind = REFERENCE;
1670 expr->id = 0;
1672 if (newref)
1674 PRE_EXPR_REFERENCE (expr) = newref;
1675 constant = fully_constant_expression (expr);
1676 if (constant != expr)
1677 return constant;
1679 new_val_id = newref->value_id;
1680 get_or_alloc_expression_id (expr);
1682 else
1684 if (changed || !same_valid)
1686 new_val_id = get_next_value_id ();
1687 value_expressions.safe_grow_cleared
1688 (get_max_value_id () + 1);
1690 else
1691 new_val_id = ref->value_id;
1692 newref = vn_reference_insert_pieces (newvuse, ref->set,
1693 ref->type,
1694 newoperands,
1695 result, new_val_id);
1696 newoperands.create (0);
1697 PRE_EXPR_REFERENCE (expr) = newref;
1698 constant = fully_constant_expression (expr);
1699 if (constant != expr)
1700 return constant;
1701 get_or_alloc_expression_id (expr);
1703 add_to_value (new_val_id, expr);
1705 newoperands.release ();
1706 return expr;
1708 break;
1710 case NAME:
1712 tree name = PRE_EXPR_NAME (expr);
1713 gimple def_stmt = SSA_NAME_DEF_STMT (name);
1714 /* If the SSA name is defined by a PHI node in this block,
1715 translate it. */
1716 if (gimple_code (def_stmt) == GIMPLE_PHI
1717 && gimple_bb (def_stmt) == phiblock)
1719 edge e = find_edge (pred, gimple_bb (def_stmt));
1720 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1722 /* Handle constant. */
1723 if (is_gimple_min_invariant (def))
1724 return get_or_alloc_expr_for_constant (def);
1726 return get_or_alloc_expr_for_name (def);
1728 /* Otherwise return it unchanged - it will get cleaned if its
1729 value is not available in PRED's AVAIL_OUT set of expressions. */
1730 return expr;
1733 default:
1734 gcc_unreachable ();
1738 /* Wrapper around phi_translate_1 providing caching functionality. */
1740 static pre_expr
1741 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1742 basic_block pred, basic_block phiblock)
1744 expr_pred_trans_t slot = NULL;
1745 pre_expr phitrans;
1747 if (!expr)
1748 return NULL;
1750 /* Constants contain no values that need translation. */
1751 if (expr->kind == CONSTANT)
1752 return expr;
1754 if (value_id_constant_p (get_expr_value_id (expr)))
1755 return expr;
1757 /* Don't add translations of NAMEs as those are cheap to translate. */
1758 if (expr->kind != NAME)
1760 if (phi_trans_add (&slot, expr, pred))
1761 return slot->v;
1762 /* Store NULL for the value we want to return in the case of
1763 recursing. */
1764 slot->v = NULL;
1767 /* Translate. */
1768 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1770 if (slot)
1772 if (phitrans)
1773 slot->v = phitrans;
1774 else
1775 /* Remove failed translations again, as they cause insert
1776 iteration to not pick up new opportunities reliably. */
1777 phi_translate_table.remove_elt_with_hash (slot, slot->hashcode);
1780 return phitrans;
1784 /* For each expression in SET, translate the values through phi nodes
1785 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1786 expressions in DEST. */
1788 static void
1789 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1790 basic_block phiblock)
1792 vec<pre_expr> exprs;
1793 pre_expr expr;
1794 int i;
1796 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1798 bitmap_set_copy (dest, set);
1799 return;
1802 exprs = sorted_array_from_bitmap_set (set);
1803 FOR_EACH_VEC_ELT (exprs, i, expr)
1805 pre_expr translated;
1806 translated = phi_translate (expr, set, NULL, pred, phiblock);
1807 if (!translated)
1808 continue;
1810 /* We might end up with multiple expressions from SET being
1811 translated to the same value. In this case we do not want
1812 to retain the NARY or REFERENCE expression but prefer a NAME
1813 which would be the leader. */
1814 if (translated->kind == NAME)
1815 bitmap_value_replace_in_set (dest, translated);
1816 else
1817 bitmap_value_insert_into_set (dest, translated);
1819 exprs.release ();
1822 /* Find the leader for a value (i.e., the name representing that
1823 value) in a given set, and return it. Return NULL if no leader
1824 is found. */
1826 static pre_expr
1827 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1829 if (value_id_constant_p (val))
1831 unsigned int i;
1832 bitmap_iterator bi;
1833 bitmap exprset = value_expressions[val];
1835 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1837 pre_expr expr = expression_for_id (i);
1838 if (expr->kind == CONSTANT)
1839 return expr;
1842 if (bitmap_set_contains_value (set, val))
1844 /* Rather than walk the entire bitmap of expressions, and see
1845 whether any of them has the value we are looking for, we look
1846 at the reverse mapping, which tells us the set of expressions
1847 that have a given value (IE value->expressions with that
1848 value) and see if any of those expressions are in our set.
1849 The number of expressions per value is usually significantly
1850 less than the number of expressions in the set. In fact, for
1851 large testcases, doing it this way is roughly 5-10x faster
1852 than walking the bitmap.
1853 If this is somehow a significant loss for some cases, we can
1854 choose which set to walk based on which set is smaller. */
1855 unsigned int i;
1856 bitmap_iterator bi;
1857 bitmap exprset = value_expressions[val];
1859 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1860 return expression_for_id (i);
1862 return NULL;
1865 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1866 BLOCK by seeing if it is not killed in the block. Note that we are
1867 only determining whether there is a store that kills it. Because
1868 of the order in which clean iterates over values, we are guaranteed
1869 that altered operands will have caused us to be eliminated from the
1870 ANTIC_IN set already. */
1872 static bool
1873 value_dies_in_block_x (pre_expr expr, basic_block block)
1875 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1876 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1877 gimple def;
1878 gimple_stmt_iterator gsi;
1879 unsigned id = get_expression_id (expr);
1880 bool res = false;
1881 ao_ref ref;
1883 if (!vuse)
1884 return false;
1886 /* Lookup a previously calculated result. */
1887 if (EXPR_DIES (block)
1888 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1889 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1891 /* A memory expression {e, VUSE} dies in the block if there is a
1892 statement that may clobber e. If, starting the statement walk from the
1893 top of the basic block, a statement uses VUSE, there can be no kill
1894 in between that use and the original statement that loaded {e, VUSE},
1895 so we can stop walking. */
1896 ref.base = NULL_TREE;
1897 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1899 tree def_vuse, def_vdef;
1900 def = gsi_stmt (gsi);
1901 def_vuse = gimple_vuse (def);
1902 def_vdef = gimple_vdef (def);
1904 /* Not a memory statement. */
1905 if (!def_vuse)
1906 continue;
1908 /* Not a may-def. */
1909 if (!def_vdef)
1911 /* A load with the same VUSE, we're done. */
1912 if (def_vuse == vuse)
1913 break;
1915 continue;
1918 /* Init ref only if we really need it. */
1919 if (ref.base == NULL_TREE
1920 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1921 refx->operands))
1923 res = true;
1924 break;
1926 /* If the statement may clobber expr, it dies. */
1927 if (stmt_may_clobber_ref_p_1 (def, &ref))
1929 res = true;
1930 break;
1934 /* Remember the result. */
1935 if (!EXPR_DIES (block))
1936 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1937 bitmap_set_bit (EXPR_DIES (block), id * 2);
1938 if (res)
1939 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1941 return res;
1945 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1946 contains its value-id. */
1948 static bool
1949 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1951 if (op && TREE_CODE (op) == SSA_NAME)
1953 unsigned int value_id = VN_INFO (op)->value_id;
1954 if (!(bitmap_set_contains_value (set1, value_id)
1955 || (set2 && bitmap_set_contains_value (set2, value_id))))
1956 return false;
1958 return true;
1961 /* Determine if the expression EXPR is valid in SET1 U SET2.
1962 ONLY SET2 CAN BE NULL.
1963 This means that we have a leader for each part of the expression
1964 (if it consists of values), or the expression is an SSA_NAME.
1965 For loads/calls, we also see if the vuse is killed in this block. */
1967 static bool
1968 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
1969 basic_block block)
1971 switch (expr->kind)
1973 case NAME:
1974 return bitmap_find_leader (AVAIL_OUT (block),
1975 get_expr_value_id (expr)) != NULL;
1976 case NARY:
1978 unsigned int i;
1979 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1980 for (i = 0; i < nary->length; i++)
1981 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1982 return false;
1983 return true;
1985 break;
1986 case REFERENCE:
1988 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1989 vn_reference_op_t vro;
1990 unsigned int i;
1992 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1994 if (!op_valid_in_sets (set1, set2, vro->op0)
1995 || !op_valid_in_sets (set1, set2, vro->op1)
1996 || !op_valid_in_sets (set1, set2, vro->op2))
1997 return false;
1999 return true;
2001 default:
2002 gcc_unreachable ();
2006 /* Clean the set of expressions that are no longer valid in SET1 or
2007 SET2. This means expressions that are made up of values we have no
2008 leaders for in SET1 or SET2. This version is used for partial
2009 anticipation, where an expression is valid if it has a leader in
2010 either ANTIC_IN or PA_IN. */
2012 static void
2013 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2015 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2016 pre_expr expr;
2017 int i;
2019 FOR_EACH_VEC_ELT (exprs, i, expr)
2021 if (!valid_in_sets (set1, set2, expr, block))
2022 bitmap_remove_from_set (set1, expr);
2024 exprs.release ();
2027 /* Clean the set of expressions that are no longer valid in SET. This
2028 means expressions that are made up of values we have no leaders for
2029 in SET. */
2031 static void
2032 clean (bitmap_set_t set, basic_block block)
2034 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2035 pre_expr expr;
2036 int i;
2038 FOR_EACH_VEC_ELT (exprs, i, expr)
2040 if (!valid_in_sets (set, NULL, expr, block))
2041 bitmap_remove_from_set (set, expr);
2043 exprs.release ();
2046 /* Clean the set of expressions that are no longer valid in SET because
2047 they are clobbered in BLOCK or because they trap and may not be executed. */
2049 static void
2050 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2052 bitmap_iterator bi;
2053 unsigned i;
2055 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2057 pre_expr expr = expression_for_id (i);
2058 if (expr->kind == REFERENCE)
2060 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2061 if (ref->vuse)
2063 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2064 if (!gimple_nop_p (def_stmt)
2065 && ((gimple_bb (def_stmt) != block
2066 && !dominated_by_p (CDI_DOMINATORS,
2067 block, gimple_bb (def_stmt)))
2068 || (gimple_bb (def_stmt) == block
2069 && value_dies_in_block_x (expr, block))))
2070 bitmap_remove_from_set (set, expr);
2073 else if (expr->kind == NARY)
2075 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2076 /* If the NARY may trap, make sure the block does not contain
2077 a possible exit point.
2078 ??? This is overly conservative if we translate AVAIL_OUT
2079 as the available expression might be after the exit point. */
2080 if (BB_MAY_NOTRETURN (block)
2081 && vn_nary_may_trap (nary))
2082 bitmap_remove_from_set (set, expr);
2087 static sbitmap has_abnormal_preds;
2089 /* List of blocks that may have changed during ANTIC computation and
2090 thus need to be iterated over. */
2092 static sbitmap changed_blocks;
2094 /* Decide whether to defer a block for a later iteration, or PHI
2095 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2096 should defer the block, and true if we processed it. */
2098 static bool
2099 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2100 basic_block block, basic_block phiblock)
2102 if (!BB_VISITED (phiblock))
2104 bitmap_set_bit (changed_blocks, block->index);
2105 BB_VISITED (block) = 0;
2106 BB_DEFERRED (block) = 1;
2107 return false;
2109 else
2110 phi_translate_set (dest, source, block, phiblock);
2111 return true;
2114 /* Compute the ANTIC set for BLOCK.
2116 If succs(BLOCK) > 1 then
2117 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2118 else if succs(BLOCK) == 1 then
2119 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2121 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
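/* Small worked example (purely illustrative): in a diamond

        B1
       /  \
      B2    B3
       \  /
        B4

   where B4 computes a_1 + b_2, EXP_GEN[B4] and hence ANTIC_IN[B4]
   contain the value of a+b.  B2 and B3 each have the single successor
   B4, so ANTIC_OUT[B2] = ANTIC_OUT[B3] = ANTIC_IN[B4] (phi translated
   if needed), and a+b reaches ANTIC_OUT[B1] as the intersection over
   B1's successors, provided neither a_1 nor b_2 is redefined on the
   way.  */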
2124 static bool
2125 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2127 bool changed = false;
2128 bitmap_set_t S, old, ANTIC_OUT;
2129 bitmap_iterator bi;
2130 unsigned int bii;
2131 edge e;
2132 edge_iterator ei;
2134 old = ANTIC_OUT = S = NULL;
2135 BB_VISITED (block) = 1;
2137 /* If any edges from predecessors are abnormal, ANTIC_IN is empty,
2138 so do nothing. */
2139 if (block_has_abnormal_pred_edge)
2140 goto maybe_dump_sets;
2142 old = ANTIC_IN (block);
2143 ANTIC_OUT = bitmap_set_new ();
2145 /* If the block has no successors, ANTIC_OUT is empty. */
2146 if (EDGE_COUNT (block->succs) == 0)
2148 /* If we have one successor, we could have some phi nodes to
2149 translate through. */
2150 else if (single_succ_p (block))
2152 basic_block succ_bb = single_succ (block);
2154 /* We trade iterations of the dataflow equations for having to
2155 phi translate the maximal set, which is incredibly slow
2156 (since the maximal set often has 300+ members, even when you
2157 have a small number of blocks).
2158 Basically, we defer the computation of ANTIC for this block
2159 until we have processed its successor, which will inevitably
2160 have a *much* smaller set of values to phi translate once
2161 clean has been run on it.
2162 The cost of doing this is that we technically perform more
2163 iterations; however, they are lower-cost iterations.
2165 Timings for PRE on tramp3d-v4:
2166 without maximal set fix: 11 seconds
2167 with maximal set fix/without deferring: 26 seconds
2168 with maximal set fix/with deferring: 11 seconds
2171 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2172 block, succ_bb))
2174 changed = true;
2175 goto maybe_dump_sets;
2178 /* If we have multiple successors, we take the intersection of all of
2179 them. Note that in the case of loop exit phi nodes, we may have
2180 phis to translate through. */
2181 else
2183 vec<basic_block> worklist;
2184 size_t i;
2185 basic_block bprime, first = NULL;
2187 worklist.create (EDGE_COUNT (block->succs));
2188 FOR_EACH_EDGE (e, ei, block->succs)
2190 if (!first
2191 && BB_VISITED (e->dest))
2192 first = e->dest;
2193 else if (BB_VISITED (e->dest))
2194 worklist.quick_push (e->dest);
2197 /* With multiple successors, we must have visited at least one already. */
2198 if (!first)
2200 bitmap_set_bit (changed_blocks, block->index);
2201 BB_VISITED (block) = 0;
2202 BB_DEFERRED (block) = 1;
2203 changed = true;
2204 worklist.release ();
2205 goto maybe_dump_sets;
2208 if (!gimple_seq_empty_p (phi_nodes (first)))
2209 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2210 else
2211 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2213 FOR_EACH_VEC_ELT (worklist, i, bprime)
2215 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2217 bitmap_set_t tmp = bitmap_set_new ();
2218 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2219 bitmap_set_and (ANTIC_OUT, tmp);
2220 bitmap_set_free (tmp);
2222 else
2223 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2225 worklist.release ();
2228 /* Prune expressions that are clobbered in block and thus become
2229 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2230 prune_clobbered_mems (ANTIC_OUT, block);
2232 /* Generate ANTIC_OUT - TMP_GEN. */
2233 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2235 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2236 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2237 TMP_GEN (block));
2239 /* Then union in the ANTIC_OUT - TMP_GEN values,
2240 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2241 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2242 bitmap_value_insert_into_set (ANTIC_IN (block),
2243 expression_for_id (bii));
2245 clean (ANTIC_IN (block), block);
2247 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2249 changed = true;
2250 bitmap_set_bit (changed_blocks, block->index);
2251 FOR_EACH_EDGE (e, ei, block->preds)
2252 bitmap_set_bit (changed_blocks, e->src->index);
2254 else
2255 bitmap_clear_bit (changed_blocks, block->index);
2257 maybe_dump_sets:
2258 if (dump_file && (dump_flags & TDF_DETAILS))
2260 if (!BB_DEFERRED (block) || BB_VISITED (block))
2262 if (ANTIC_OUT)
2263 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2265 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2266 block->index);
2268 if (S)
2269 print_bitmap_set (dump_file, S, "S", block->index);
2271 else
2273 fprintf (dump_file,
2274 "Block %d was deferred for a future iteration.\n",
2275 block->index);
2278 if (old)
2279 bitmap_set_free (old);
2280 if (S)
2281 bitmap_set_free (S);
2282 if (ANTIC_OUT)
2283 bitmap_set_free (ANTIC_OUT);
2284 return changed;
2287 /* Compute PARTIAL_ANTIC for BLOCK.
2289 If succs(BLOCK) > 1 then
2290 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2291 in ANTIC_OUT for all succ(BLOCK)
2292 else if succs(BLOCK) == 1 then
2293 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2295 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2296 - ANTIC_IN[BLOCK])
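/* Sketch of the difference from full anticipation: if a+b is
   computed on only one of two paths below BLOCK, it drops out of the
   intersection used for ANTIC_OUT but survives the union used for
   PA_OUT, so it can still drive a speculative insertion in
   do_partial_partial_insertion below.  */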
2299 static bool
2300 compute_partial_antic_aux (basic_block block,
2301 bool block_has_abnormal_pred_edge)
2303 bool changed = false;
2304 bitmap_set_t old_PA_IN;
2305 bitmap_set_t PA_OUT;
2306 edge e;
2307 edge_iterator ei;
2308 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2310 old_PA_IN = PA_OUT = NULL;
2312 /* If any edges from predecessors are abnormal, PA_IN is empty,
2313 so do nothing. */
2314 if (block_has_abnormal_pred_edge)
2315 goto maybe_dump_sets;
2317 /* If there are too many partially anticipatable values in the
2318 block, phi_translate_set can take exponential time: stop
2319 before the translation starts. */
2320 if (max_pa
2321 && single_succ_p (block)
2322 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2323 goto maybe_dump_sets;
2325 old_PA_IN = PA_IN (block);
2326 PA_OUT = bitmap_set_new ();
2328 /* If the block has no successors, PA_OUT is empty. */
2329 if (EDGE_COUNT (block->succs) == 0)
2331 /* If we have one successor, we could have some phi nodes to
2332 translate through. Note that we can't phi translate across DFS
2333 back edges in partial antic, because it uses a union operation on
2334 the successors. For recurrences like IVs, we would end up
2335 generating a new value in the set on each iteration (i + 3 (VH.1),
2336 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2337 else if (single_succ_p (block))
2339 basic_block succ = single_succ (block);
2340 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2341 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2343 /* If we have multiple successors, we take the union of all of
2344 them. */
2345 else
2347 vec<basic_block> worklist;
2348 size_t i;
2349 basic_block bprime;
2351 worklist.create (EDGE_COUNT (block->succs));
2352 FOR_EACH_EDGE (e, ei, block->succs)
2354 if (e->flags & EDGE_DFS_BACK)
2355 continue;
2356 worklist.quick_push (e->dest);
2358 if (worklist.length () > 0)
2360 FOR_EACH_VEC_ELT (worklist, i, bprime)
2362 unsigned int i;
2363 bitmap_iterator bi;
2365 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2366 bitmap_value_insert_into_set (PA_OUT,
2367 expression_for_id (i));
2368 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2370 bitmap_set_t pa_in = bitmap_set_new ();
2371 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2372 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2373 bitmap_value_insert_into_set (PA_OUT,
2374 expression_for_id (i));
2375 bitmap_set_free (pa_in);
2377 else
2378 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2379 bitmap_value_insert_into_set (PA_OUT,
2380 expression_for_id (i));
2383 worklist.release ();
2386 /* Prune expressions that are clobbered in block and thus become
2387 invalid if translated from PA_OUT to PA_IN. */
2388 prune_clobbered_mems (PA_OUT, block);
2390 /* PA_IN starts with PA_OUT - TMP_GEN.
2391 Then we subtract things from ANTIC_IN. */
2392 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2394 /* For partial antic, we want to put the phi results back in, since
2395 we will properly avoid making them partially antic over backedges. */
2396 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2397 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2399 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2400 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2402 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2404 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2406 changed = true;
2407 bitmap_set_bit (changed_blocks, block->index);
2408 FOR_EACH_EDGE (e, ei, block->preds)
2409 bitmap_set_bit (changed_blocks, e->src->index);
2411 else
2412 bitmap_clear_bit (changed_blocks, block->index);
2414 maybe_dump_sets:
2415 if (dump_file && (dump_flags & TDF_DETAILS))
2417 if (PA_OUT)
2418 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2420 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2422 if (old_PA_IN)
2423 bitmap_set_free (old_PA_IN);
2424 if (PA_OUT)
2425 bitmap_set_free (PA_OUT);
2426 return changed;
2429 /* Compute ANTIC and partial ANTIC sets. */
2431 static void
2432 compute_antic (void)
2434 bool changed = true;
2435 int num_iterations = 0;
2436 basic_block block;
2437 int i;
2439 /* If any predecessor edges are abnormal, we punt, so ANTIC_IN is empty.
2440 We pre-build the map of blocks with incoming abnormal edges here. */
2441 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2442 bitmap_clear (has_abnormal_preds);
2444 FOR_ALL_BB (block)
2446 edge_iterator ei;
2447 edge e;
2449 FOR_EACH_EDGE (e, ei, block->preds)
2451 e->flags &= ~EDGE_DFS_BACK;
2452 if (e->flags & EDGE_ABNORMAL)
2454 bitmap_set_bit (has_abnormal_preds, block->index);
2455 break;
2459 BB_VISITED (block) = 0;
2460 BB_DEFERRED (block) = 0;
2462 /* While we are here, give empty ANTIC_IN sets to each block. */
2463 ANTIC_IN (block) = bitmap_set_new ();
2464 PA_IN (block) = bitmap_set_new ();
2467 /* At the exit block we anticipate nothing. */
2468 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2470 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2471 bitmap_ones (changed_blocks);
2472 while (changed)
2474 if (dump_file && (dump_flags & TDF_DETAILS))
2475 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2476 /* ??? We need to clear our PHI translation cache here as the
2477 ANTIC sets shrink and we restrict valid translations to
2478 those having operands with leaders in ANTIC. Same below
2479 for PA ANTIC computation. */
2480 num_iterations++;
2481 changed = false;
2482 for (i = postorder_num - 1; i >= 0; i--)
2484 if (bitmap_bit_p (changed_blocks, postorder[i]))
2486 basic_block block = BASIC_BLOCK (postorder[i]);
2487 changed |= compute_antic_aux (block,
2488 bitmap_bit_p (has_abnormal_preds,
2489 block->index));
2492 /* Theoretically possible, but *highly* unlikely. */
2493 gcc_checking_assert (num_iterations < 500);
2496 statistics_histogram_event (cfun, "compute_antic iterations",
2497 num_iterations);
2499 if (do_partial_partial)
2501 bitmap_ones (changed_blocks);
2502 mark_dfs_back_edges ();
2503 num_iterations = 0;
2504 changed = true;
2505 while (changed)
2507 if (dump_file && (dump_flags & TDF_DETAILS))
2508 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2509 num_iterations++;
2510 changed = false;
2511 for (i = postorder_num - 1 ; i >= 0; i--)
2513 if (bitmap_bit_p (changed_blocks, postorder[i]))
2515 basic_block block = BASIC_BLOCK (postorder[i]);
2516 changed
2517 |= compute_partial_antic_aux (block,
2518 bitmap_bit_p (has_abnormal_preds,
2519 block->index));
2522 /* Theoretically possible, but *highly* unlikely. */
2523 gcc_checking_assert (num_iterations < 500);
2525 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2526 num_iterations);
2528 sbitmap_free (has_abnormal_preds);
2529 sbitmap_free (changed_blocks);
2533 /* Inserted expressions are placed onto this worklist, which is used
2534 for performing quick dead code elimination of insertions we made
2535 that didn't turn out to be necessary. */
2536 static bitmap inserted_exprs;
2538 /* The actual worker for create_component_ref_by_pieces. */
2540 static tree
2541 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2542 unsigned int *operand, gimple_seq *stmts)
2544 vn_reference_op_t currop = &ref->operands[*operand];
2545 tree genop;
2546 ++*operand;
2547 switch (currop->opcode)
2549 case CALL_EXPR:
2551 tree folded, sc = NULL_TREE;
2552 unsigned int nargs = 0;
2553 tree fn, *args;
2554 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2555 fn = currop->op0;
2556 else
2557 fn = find_or_generate_expression (block, currop->op0, stmts);
2558 if (!fn)
2559 return NULL_TREE;
2560 if (currop->op1)
2562 sc = find_or_generate_expression (block, currop->op1, stmts);
2563 if (!sc)
2564 return NULL_TREE;
2566 args = XNEWVEC (tree, ref->operands.length () - 1);
2567 while (*operand < ref->operands.length ())
2569 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2570 operand, stmts);
2571 if (!args[nargs])
2572 return NULL_TREE;
2573 nargs++;
2575 folded = build_call_array (currop->type,
2576 (TREE_CODE (fn) == FUNCTION_DECL
2577 ? build_fold_addr_expr (fn) : fn),
2578 nargs, args);
2579 free (args);
2580 if (sc)
2581 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2582 return folded;
2585 case MEM_REF:
2587 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2588 stmts);
2589 if (!baseop)
2590 return NULL_TREE;
2591 tree offset = currop->op0;
2592 if (TREE_CODE (baseop) == ADDR_EXPR
2593 && handled_component_p (TREE_OPERAND (baseop, 0)))
2595 HOST_WIDE_INT off;
2596 tree base;
2597 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2598 &off);
2599 gcc_assert (base);
2600 offset = int_const_binop (PLUS_EXPR, offset,
2601 build_int_cst (TREE_TYPE (offset),
2602 off));
2603 baseop = build_fold_addr_expr (base);
2605 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2608 case TARGET_MEM_REF:
2610 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2611 vn_reference_op_t nextop = &ref->operands[++*operand];
2612 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2613 stmts);
2614 if (!baseop)
2615 return NULL_TREE;
2616 if (currop->op0)
2618 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2619 if (!genop0)
2620 return NULL_TREE;
2622 if (nextop->op0)
2624 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2625 if (!genop1)
2626 return NULL_TREE;
2628 return build5 (TARGET_MEM_REF, currop->type,
2629 baseop, currop->op2, genop0, currop->op1, genop1);
2632 case ADDR_EXPR:
2633 if (currop->op0)
2635 gcc_assert (is_gimple_min_invariant (currop->op0));
2636 return currop->op0;
2638 /* Fallthrough. */
2639 case REALPART_EXPR:
2640 case IMAGPART_EXPR:
2641 case VIEW_CONVERT_EXPR:
2643 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2644 stmts);
2645 if (!genop0)
2646 return NULL_TREE;
2647 return fold_build1 (currop->opcode, currop->type, genop0);
2650 case WITH_SIZE_EXPR:
2652 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2653 stmts);
2654 if (!genop0)
2655 return NULL_TREE;
2656 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2657 if (!genop1)
2658 return NULL_TREE;
2659 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2662 case BIT_FIELD_REF:
2664 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2665 stmts);
2666 if (!genop0)
2667 return NULL_TREE;
2668 tree op1 = currop->op0;
2669 tree op2 = currop->op1;
2670 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2673 /* For array ref vn_reference_ops, operand 1 of the array ref
2674 is op0 of the reference op and operand 3 of the array ref is
2675 op1. */
2676 case ARRAY_RANGE_REF:
2677 case ARRAY_REF:
2679 tree genop0;
2680 tree genop1 = currop->op0;
2681 tree genop2 = currop->op1;
2682 tree genop3 = currop->op2;
2683 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2684 stmts);
2685 if (!genop0)
2686 return NULL_TREE;
2687 genop1 = find_or_generate_expression (block, genop1, stmts);
2688 if (!genop1)
2689 return NULL_TREE;
2690 if (genop2)
2692 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2693 /* Drop zero minimum index if redundant. */
2694 if (integer_zerop (genop2)
2695 && (!domain_type
2696 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2697 genop2 = NULL_TREE;
2698 else
2700 genop2 = find_or_generate_expression (block, genop2, stmts);
2701 if (!genop2)
2702 return NULL_TREE;
2705 if (genop3)
2707 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2708 /* We can't always put a size in units of the element alignment
2709 here as the element alignment may not be visible. See
2710 PR43783. Simply drop the element size for constant
2711 sizes. */
2712 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2713 genop3 = NULL_TREE;
2714 else
2716 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2717 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2718 genop3 = find_or_generate_expression (block, genop3, stmts);
2719 if (!genop3)
2720 return NULL_TREE;
2723 return build4 (currop->opcode, currop->type, genop0, genop1,
2724 genop2, genop3);
2726 case COMPONENT_REF:
2728 tree op0;
2729 tree op1;
2730 tree genop2 = currop->op1;
2731 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2732 if (!op0)
2733 return NULL_TREE;
2734 /* op1 should be a FIELD_DECL, which is represented by itself. */
2735 op1 = currop->op0;
2736 if (genop2)
2738 genop2 = find_or_generate_expression (block, genop2, stmts);
2739 if (!genop2)
2740 return NULL_TREE;
2742 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2745 case SSA_NAME:
2747 genop = find_or_generate_expression (block, currop->op0, stmts);
2748 return genop;
2750 case STRING_CST:
2751 case INTEGER_CST:
2752 case COMPLEX_CST:
2753 case VECTOR_CST:
2754 case REAL_CST:
2755 case CONSTRUCTOR:
2756 case VAR_DECL:
2757 case PARM_DECL:
2758 case CONST_DECL:
2759 case RESULT_DECL:
2760 case FUNCTION_DECL:
2761 return currop->op0;
2763 default:
2764 gcc_unreachable ();
2768 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2769 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2770 trying to rename aggregates into SSA form directly, which is a no-no.
2772 Thus, this routine doesn't create temporaries; it just builds a
2773 single access expression for the array, calling
2774 find_or_generate_expression to build the innermost pieces.
2776 This function is a subroutine of create_expression_by_pieces, and
2777 should not be called on its own unless you really know what you
2778 are doing. */
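/* As an illustration (hypothetical reference), rebuilding x.a[i_1]
   walks the vn_reference_op_s operands recursively: the ARRAY_REF
   case rebuilds its base, the COMPONENT_REF x.a, which in turn
   rebuilds the base x, while only leaf operands such as the index
   i_1 go through find_or_generate_expression.  The result is the
   single tree x.a[i_1], never a temporary holding the aggregate.  */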
2780 static tree
2781 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2782 gimple_seq *stmts)
2784 unsigned int op = 0;
2785 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2788 /* Find a simple leader for an expression, or generate one using
2789 create_expression_by_pieces from a NARY expression for the value.
2790 BLOCK is the basic_block we are looking for leaders in.
2791 OP is the tree expression to find a leader for or generate.
2792 Returns the leader or NULL_TREE on failure. */
2794 static tree
2795 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2797 pre_expr expr = get_or_alloc_expr_for (op);
2798 unsigned int lookfor = get_expr_value_id (expr);
2799 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2800 if (leader)
2802 if (leader->kind == NAME)
2803 return PRE_EXPR_NAME (leader);
2804 else if (leader->kind == CONSTANT)
2805 return PRE_EXPR_CONSTANT (leader);
2807 /* Defer. */
2808 return NULL_TREE;
2811 /* It must be a complex expression, so generate it recursively. Note
2812 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2813 where the insert algorithm fails to insert a required expression. */
2814 bitmap exprset = value_expressions[lookfor];
2815 bitmap_iterator bi;
2816 unsigned int i;
2817 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2819 pre_expr temp = expression_for_id (i);
2820 /* We cannot insert random REFERENCE expressions at arbitrary
2821 places. We can insert NARYs, which eventually re-materialize
2822 their operand values. */
2823 if (temp->kind == NARY)
2824 return create_expression_by_pieces (block, temp, stmts,
2825 get_expr_type (expr));
2828 /* Defer. */
2829 return NULL_TREE;
2832 #define NECESSARY GF_PLF_1
2834 /* Create an expression in pieces, so that we can handle very complex
2835 expressions that may be ANTIC, but not necessarily GIMPLE.
2836 BLOCK is the basic block the expression will be inserted into,
2837 EXPR is the expression to insert (in value form)
2838 STMTS is a statement list to append the necessary insertions into.
2840 This function will die if we hit some value that shouldn't be
2841 ANTIC but is (i.e. there is no leader for it, or its components).
2842 The function returns NULL_TREE in case a different antic expression
2843 has to be inserted first.
2844 This function may also generate expressions that are themselves
2845 partially or fully redundant. Those that are will be either made
2846 fully redundant during the next iteration of insert (for partially
2847 redundant ones), or eliminated by eliminate (for fully redundant
2848 ones). */
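/* Sketch (illustrative SSA names): asked for the value of
   a_1 + (b_2 * c_3) when the multiplication already has the leader
   t_4, this emits

     pretmp_5 = a_1 + t_4;

   onto STMTS, gives pretmp_5 the value-id of the requested
   expression, and returns pretmp_5.  */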
2850 static tree
2851 create_expression_by_pieces (basic_block block, pre_expr expr,
2852 gimple_seq *stmts, tree type)
2854 tree name;
2855 tree folded;
2856 gimple_seq forced_stmts = NULL;
2857 unsigned int value_id;
2858 gimple_stmt_iterator gsi;
2859 tree exprtype = type ? type : get_expr_type (expr);
2860 pre_expr nameexpr;
2861 gimple newstmt;
2863 switch (expr->kind)
2865 /* We may hit the NAME/CONSTANT case if we have to convert types
2866 that value numbering saw through. */
2867 case NAME:
2868 folded = PRE_EXPR_NAME (expr);
2869 break;
2870 case CONSTANT:
2871 folded = PRE_EXPR_CONSTANT (expr);
2872 break;
2873 case REFERENCE:
2875 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2876 folded = create_component_ref_by_pieces (block, ref, stmts);
2877 if (!folded)
2878 return NULL_TREE;
2880 break;
2881 case NARY:
2883 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2884 tree *genop = XALLOCAVEC (tree, nary->length);
2885 unsigned i;
2886 for (i = 0; i < nary->length; ++i)
2888 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2889 if (!genop[i])
2890 return NULL_TREE;
2891 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2892 may have conversions stripped. */
2893 if (nary->opcode == POINTER_PLUS_EXPR)
2895 if (i == 0)
2896 genop[i] = fold_convert (nary->type, genop[i]);
2897 else if (i == 1)
2898 genop[i] = convert_to_ptrofftype (genop[i]);
2900 else
2901 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
2903 if (nary->opcode == CONSTRUCTOR)
2905 vec<constructor_elt, va_gc> *elts = NULL;
2906 for (i = 0; i < nary->length; ++i)
2907 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2908 folded = build_constructor (nary->type, elts);
2910 else
2912 switch (nary->length)
2914 case 1:
2915 folded = fold_build1 (nary->opcode, nary->type,
2916 genop[0]);
2917 break;
2918 case 2:
2919 folded = fold_build2 (nary->opcode, nary->type,
2920 genop[0], genop[1]);
2921 break;
2922 case 3:
2923 folded = fold_build3 (nary->opcode, nary->type,
2924 genop[0], genop[1], genop[2]);
2925 break;
2926 default:
2927 gcc_unreachable ();
2931 break;
2932 default:
2933 gcc_unreachable ();
2936 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2937 folded = fold_convert (exprtype, folded);
2939 /* Force the generated expression to be a sequence of GIMPLE
2940 statements.
2941 We have to call unshare_expr because force_gimple_operand may
2942 modify the tree we pass to it. */
2943 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2944 false, NULL);
2946 /* If we have any intermediate expressions, add them to the value
2947 sets and chain them into the instruction stream. */
2948 if (forced_stmts)
2950 gsi = gsi_start (forced_stmts);
2951 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2953 gimple stmt = gsi_stmt (gsi);
2954 tree forcedname = gimple_get_lhs (stmt);
2955 pre_expr nameexpr;
2957 if (TREE_CODE (forcedname) == SSA_NAME)
2959 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2960 VN_INFO_GET (forcedname)->valnum = forcedname;
2961 VN_INFO (forcedname)->value_id = get_next_value_id ();
2962 nameexpr = get_or_alloc_expr_for_name (forcedname);
2963 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2964 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2965 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2968 gimple_seq_add_seq (stmts, forced_stmts);
2971 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2972 newstmt = gimple_build_assign (name, folded);
2973 gimple_set_plf (newstmt, NECESSARY, false);
2975 gimple_seq_add_stmt (stmts, newstmt);
2976 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
2978 /* Fold the last statement. */
2979 gsi = gsi_last (*stmts);
2980 if (fold_stmt_inplace (&gsi))
2981 update_stmt (gsi_stmt (gsi));
2983 /* Add a value number to the temporary.
2984 The value may already exist in either NEW_SETS or AVAIL_OUT, because
2985 we are creating the expression by pieces, and this particular piece of
2986 the expression may have been represented. There is no harm in replacing
2987 here. */
2988 value_id = get_expr_value_id (expr);
2989 VN_INFO_GET (name)->value_id = value_id;
2990 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2991 if (VN_INFO (name)->valnum == NULL_TREE)
2992 VN_INFO (name)->valnum = name;
2993 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2994 nameexpr = get_or_alloc_expr_for_name (name);
2995 add_to_value (value_id, nameexpr);
2996 if (NEW_SETS (block))
2997 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2998 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3000 pre_stats.insertions++;
3001 if (dump_file && (dump_flags & TDF_DETAILS))
3003 fprintf (dump_file, "Inserted ");
3004 print_gimple_stmt (dump_file, newstmt, 0, 0);
3005 fprintf (dump_file, " in predecessor %d (%04d)\n",
3006 block->index, value_id);
3009 return name;
3013 /* Returns true if we want to inhibit the insertion of PHI nodes
3014 for the given EXPR for basic block BB (a member of a loop).
3015 We want to do this when we fear that the induction variable we
3016 create might inhibit vectorization. */
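/* For example (illustrative), for a load a[i_1] in a loop where i_1
   is a simple induction variable, inserting a PHI for the loaded
   value would create a new recurrence that can hide the simple
   data-reference pattern from the vectorizer, so we prefer to keep
   the original in-loop load.  */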
3018 static bool
3019 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3021 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3022 vec<vn_reference_op_s> ops = vr->operands;
3023 vn_reference_op_t op;
3024 unsigned i;
3026 /* If we aren't going to vectorize, we don't inhibit anything. */
3027 if (!flag_tree_loop_vectorize)
3028 return false;
3030 /* Otherwise we inhibit the insertion when the address of the
3031 memory reference is a simple induction variable. In other
3032 cases the vectorizer won't do anything anyway (either it's
3033 loop invariant or a complicated expression). */
3034 FOR_EACH_VEC_ELT (ops, i, op)
3036 switch (op->opcode)
3038 case CALL_EXPR:
3039 /* Calls are not a problem. */
3040 return false;
3042 case ARRAY_REF:
3043 case ARRAY_RANGE_REF:
3044 if (TREE_CODE (op->op0) != SSA_NAME)
3045 break;
3046 /* Fallthru. */
3047 case SSA_NAME:
3049 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3050 affine_iv iv;
3051 /* Default defs are loop invariant. */
3052 if (!defbb)
3053 break;
3054 /* Defined outside this loop, also loop invariant. */
3055 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3056 break;
3057 /* If it's a simple induction variable, inhibit insertion;
3058 the vectorizer might be interested in this one. */
3059 if (simple_iv (bb->loop_father, bb->loop_father,
3060 op->op0, &iv, true))
3061 return true;
3062 /* No simple IV; the vectorizer can't do anything, hence no
3063 reason to inhibit the transformation for this operand. */
3064 break;
3066 default:
3067 break;
3070 return false;
3073 /* Insert the to-be-made-available values of expression EXPRNUM for each
3074 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3075 merge the result with a phi node, given the same value number as
3076 the expression. Return true if we have inserted new stuff. */
3078 static bool
3079 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3080 vec<pre_expr> avail)
3082 pre_expr expr = expression_for_id (exprnum);
3083 pre_expr newphi;
3084 unsigned int val = get_expr_value_id (expr);
3085 edge pred;
3086 bool insertions = false;
3087 bool nophi = false;
3088 basic_block bprime;
3089 pre_expr eprime;
3090 edge_iterator ei;
3091 tree type = get_expr_type (expr);
3092 tree temp;
3093 gimple phi;
3095 /* Make sure we aren't creating an induction variable. */
3096 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3098 bool firstinsideloop = false;
3099 bool secondinsideloop = false;
3100 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3101 EDGE_PRED (block, 0)->src);
3102 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3103 EDGE_PRED (block, 1)->src);
3104 /* Induction variables only have one edge inside the loop. */
3105 if ((firstinsideloop ^ secondinsideloop)
3106 && (expr->kind != REFERENCE
3107 || inhibit_phi_insertion (block, expr)))
3109 if (dump_file && (dump_flags & TDF_DETAILS))
3110 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3111 nophi = true;
3115 /* Make the necessary insertions. */
3116 FOR_EACH_EDGE (pred, ei, block->preds)
3118 gimple_seq stmts = NULL;
3119 tree builtexpr;
3120 bprime = pred->src;
3121 eprime = avail[pred->dest_idx];
3123 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3125 builtexpr = create_expression_by_pieces (bprime, eprime,
3126 &stmts, type);
3127 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3128 gsi_insert_seq_on_edge (pred, stmts);
3129 if (!builtexpr)
3131 /* We cannot insert a PHI node if we failed to insert
3132 on one edge. */
3133 nophi = true;
3134 continue;
3136 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3137 insertions = true;
3139 else if (eprime->kind == CONSTANT)
3141 /* Constants may not have the right type; fold_convert
3142 should give us back a constant with the right type. */
3143 tree constant = PRE_EXPR_CONSTANT (eprime);
3144 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3146 tree builtexpr = fold_convert (type, constant);
3147 if (!is_gimple_min_invariant (builtexpr))
3149 tree forcedexpr = force_gimple_operand (builtexpr,
3150 &stmts, true,
3151 NULL);
3152 if (!is_gimple_min_invariant (forcedexpr))
3154 if (forcedexpr != builtexpr)
3156 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3157 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3159 if (stmts)
3161 gimple_stmt_iterator gsi;
3162 gsi = gsi_start (stmts);
3163 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3165 gimple stmt = gsi_stmt (gsi);
3166 tree lhs = gimple_get_lhs (stmt);
3167 if (TREE_CODE (lhs) == SSA_NAME)
3168 bitmap_set_bit (inserted_exprs,
3169 SSA_NAME_VERSION (lhs));
3170 gimple_set_plf (stmt, NECESSARY, false);
3172 gsi_insert_seq_on_edge (pred, stmts);
3174 avail[pred->dest_idx]
3175 = get_or_alloc_expr_for_name (forcedexpr);
3178 else
3179 avail[pred->dest_idx]
3180 = get_or_alloc_expr_for_constant (builtexpr);
3183 else if (eprime->kind == NAME)
3185 /* We may have to do a conversion because our value
3186 numbering can look through types in certain cases, but
3187 our IL requires all operands of a phi node have the same
3188 type. */
3189 tree name = PRE_EXPR_NAME (eprime);
3190 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3192 tree builtexpr;
3193 tree forcedexpr;
3194 builtexpr = fold_convert (type, name);
3195 forcedexpr = force_gimple_operand (builtexpr,
3196 &stmts, true,
3197 NULL);
3199 if (forcedexpr != name)
3201 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3202 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3205 if (stmts)
3207 gimple_stmt_iterator gsi;
3208 gsi = gsi_start (stmts);
3209 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3211 gimple stmt = gsi_stmt (gsi);
3212 tree lhs = gimple_get_lhs (stmt);
3213 if (TREE_CODE (lhs) == SSA_NAME)
3214 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3215 gimple_set_plf (stmt, NECESSARY, false);
3217 gsi_insert_seq_on_edge (pred, stmts);
3219 avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
3223 /* If we didn't want a phi node, and we made insertions, we still have
3224 inserted new stuff, and thus return true. If we didn't want a phi node,
3225 and didn't make insertions, we haven't added anything new, so return
3226 false. */
3227 if (nophi && insertions)
3228 return true;
3229 else if (nophi && !insertions)
3230 return false;
3232 /* Now build a phi for the new variable. */
3233 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3234 phi = create_phi_node (temp, block);
3236 gimple_set_plf (phi, NECESSARY, false);
3237 VN_INFO_GET (temp)->value_id = val;
3238 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3239 if (VN_INFO (temp)->valnum == NULL_TREE)
3240 VN_INFO (temp)->valnum = temp;
3241 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3242 FOR_EACH_EDGE (pred, ei, block->preds)
3244 pre_expr ae = avail[pred->dest_idx];
3245 gcc_assert (get_expr_type (ae) == type
3246 || useless_type_conversion_p (type, get_expr_type (ae)));
3247 if (ae->kind == CONSTANT)
3248 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3249 pred, UNKNOWN_LOCATION);
3250 else
3251 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3254 newphi = get_or_alloc_expr_for_name (temp);
3255 add_to_value (val, newphi);
3257 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3258 this insertion, since we test for the existence of this value in PHI_GEN
3259 before proceeding with the partial redundancy checks in insert_aux.
3261 The value may exist in AVAIL_OUT, in particular, it could be represented
3262 by the expression we are trying to eliminate, in which case we want the
3263 replacement to occur. If it doesn't exist in AVAIL_OUT, we want it
3264 inserted there.
3266 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3267 this block, because if it did, it would have existed in our dominator's
3268 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3271 bitmap_insert_into_set (PHI_GEN (block), newphi);
3272 bitmap_value_replace_in_set (AVAIL_OUT (block),
3273 newphi);
3274 bitmap_insert_into_set (NEW_SETS (block),
3275 newphi);
3277 if (dump_file && (dump_flags & TDF_DETAILS))
3279 fprintf (dump_file, "Created phi ");
3280 print_gimple_stmt (dump_file, phi, 0, 0);
3281 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3283 pre_stats.phis++;
3284 return true;
3289 /* Perform insertion of partially redundant values.
3290 For BLOCK, do the following:
3291 1. Propagate the NEW_SETS of the dominator into the current block.
3292 If the block has multiple predecessors,
3293 2a. Iterate over the ANTIC expressions for the block to see if
3294 any of them are partially redundant.
3295 2b. If so, insert them into the necessary predecessors to make
3296 the expression fully redundant.
3297 2c. Insert a new PHI merging the values of the predecessors.
3298 2d. Insert the new PHI, and the new expressions, into the
3299 NEW_SETS set.
3300 3. Recursively call ourselves on the dominator children of BLOCK.
3302 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3303 do_regular_insertion and do_partial_partial_insertion.
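/* Classic shape of what insertion achieves (a sketch with
   illustrative SSA names):

     if (...)                     if (...)
       x_1 = a_2 + b_3;             x_1 = a_2 + b_3;
     ...                ==>       else
     y_4 = a_2 + b_3;               pretmp_5 = a_2 + b_3;
                                  # prephitmp_6 = PHI <x_1, pretmp_5>
                                  y_4 = prephitmp_6;

   a_2 + b_3 is partially redundant at y_4; inserting it on the other
   edge and merging with a PHI makes it fully redundant, so the later
   computation can be eliminated.  */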
3307 static bool
3308 do_regular_insertion (basic_block block, basic_block dom)
3310 bool new_stuff = false;
3311 vec<pre_expr> exprs;
3312 pre_expr expr;
3313 vec<pre_expr> avail = vNULL;
3314 int i;
3316 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3317 avail.safe_grow (EDGE_COUNT (block->preds));
3319 FOR_EACH_VEC_ELT (exprs, i, expr)
3321 if (expr->kind == NARY
3322 || expr->kind == REFERENCE)
3324 unsigned int val;
3325 bool by_some = false;
3326 bool cant_insert = false;
3327 bool all_same = true;
3328 pre_expr first_s = NULL;
3329 edge pred;
3330 basic_block bprime;
3331 pre_expr eprime = NULL;
3332 edge_iterator ei;
3333 pre_expr edoubleprime = NULL;
3334 bool do_insertion = false;
3336 val = get_expr_value_id (expr);
3337 if (bitmap_set_contains_value (PHI_GEN (block), val))
3338 continue;
3339 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3341 if (dump_file && (dump_flags & TDF_DETAILS))
3343 fprintf (dump_file, "Found fully redundant value: ");
3344 print_pre_expr (dump_file, expr);
3345 fprintf (dump_file, "\n");
3347 continue;
3350 FOR_EACH_EDGE (pred, ei, block->preds)
3352 unsigned int vprime;
3354 /* We should never run insertion for the exit block
3355 and so should never come across fake pred edges. */
3356 gcc_assert (!(pred->flags & EDGE_FAKE));
3357 bprime = pred->src;
3358 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3359 bprime, block);
3361 /* eprime will generally only be NULL if the
3362 value of the expression, translated
3363 through the PHI for this predecessor, is
3364 undefined. If that is the case, we can't
3365 make the expression fully redundant,
3366 because its value is undefined along a
3367 predecessor path. We can thus break out
3368 early because it doesn't matter what the
3369 rest of the results are. */
3370 if (eprime == NULL)
3372 avail[pred->dest_idx] = NULL;
3373 cant_insert = true;
3374 break;
3377 eprime = fully_constant_expression (eprime);
3378 vprime = get_expr_value_id (eprime);
3379 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3380 vprime);
3381 if (edoubleprime == NULL)
3383 avail[pred->dest_idx] = eprime;
3384 all_same = false;
3386 else
3388 avail[pred->dest_idx] = edoubleprime;
3389 by_some = true;
3390 /* We want to perform insertions to remove a redundancy on
3391 a path in the CFG we want to optimize for speed. */
3392 if (optimize_edge_for_speed_p (pred))
3393 do_insertion = true;
3394 if (first_s == NULL)
3395 first_s = edoubleprime;
3396 else if (!pre_expr_d::equal (first_s, edoubleprime))
3397 all_same = false;
3400 /* If we can insert it, it's not the same value
3401 already existing along every predecessor, and
3402 it's defined by some predecessor, then it is
3403 partially redundant. */
3404 if (!cant_insert && !all_same && by_some)
3406 if (!do_insertion)
3408 if (dump_file && (dump_flags & TDF_DETAILS))
3410 fprintf (dump_file, "Skipping partial redundancy for "
3411 "expression ");
3412 print_pre_expr (dump_file, expr);
3413 fprintf (dump_file, " (%04d), no redundancy on to be "
3414 "optimized for speed edge\n", val);
3417 else if (dbg_cnt (treepre_insert))
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3421 fprintf (dump_file, "Found partial redundancy for "
3422 "expression ");
3423 print_pre_expr (dump_file, expr);
3424 fprintf (dump_file, " (%04d)\n",
3425 get_expr_value_id (expr));
3427 if (insert_into_preds_of_block (block,
3428 get_expression_id (expr),
3429 avail))
3430 new_stuff = true;
3433 /* If all edges produce the same value and that value is
3434 an invariant, then the PHI has the same value on all
3435 edges. Note this. */
3436 else if (!cant_insert && all_same)
3438 gcc_assert (edoubleprime->kind == CONSTANT
3439 || edoubleprime->kind == NAME);
3441 tree temp = make_temp_ssa_name (get_expr_type (expr),
3442 NULL, "pretmp");
3443 gimple assign = gimple_build_assign (temp,
3444 edoubleprime->kind == CONSTANT ? PRE_EXPR_CONSTANT (edoubleprime) : PRE_EXPR_NAME (edoubleprime));
3445 gimple_stmt_iterator gsi = gsi_after_labels (block);
3446 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3448 gimple_set_plf (assign, NECESSARY, false);
3449 VN_INFO_GET (temp)->value_id = val;
3450 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3451 if (VN_INFO (temp)->valnum == NULL_TREE)
3452 VN_INFO (temp)->valnum = temp;
3453 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3454 pre_expr newe = get_or_alloc_expr_for_name (temp);
3455 add_to_value (val, newe);
3456 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3457 bitmap_insert_into_set (NEW_SETS (block), newe);
3462 exprs.release ();
3463 avail.release ();
3464 return new_stuff;
3468 /* Perform insertion for partially anticipatable expressions. There
3469 is only one case in which we will perform insertion for these: when
3470 the expression is partially anticipatable and fully available.
3471 In this case, we know that putting it earlier will enable us to
3472 remove the later computation. */
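/* Sketch: a loop-invariant a_1 + b_2 computed on only one path
   through the loop body is merely partially anticipatable at the
   header, but it becomes fully available once inserted in the
   remaining predecessors; hoisting it removes the repeated in-loop
   computation at the price of evaluating it on iterations that never
   take that path.  */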
3475 static bool
3476 do_partial_partial_insertion (basic_block block, basic_block dom)
3478 bool new_stuff = false;
3479 vec<pre_expr> exprs;
3480 pre_expr expr;
3481 vec<pre_expr> avail = vNULL;
3482 int i;
3484 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3485 avail.safe_grow (EDGE_COUNT (block->preds));
3487 FOR_EACH_VEC_ELT (exprs, i, expr)
3489 if (expr->kind == NARY
3490 || expr->kind == REFERENCE)
3492 unsigned int val;
3493 bool by_all = true;
3494 bool cant_insert = false;
3495 edge pred;
3496 basic_block bprime;
3497 pre_expr eprime = NULL;
3498 edge_iterator ei;
3500 val = get_expr_value_id (expr);
3501 if (bitmap_set_contains_value (PHI_GEN (block), val))
3502 continue;
3503 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3504 continue;
3506 FOR_EACH_EDGE (pred, ei, block->preds)
3508 unsigned int vprime;
3509 pre_expr edoubleprime;
3511 /* We should never run insertion for the exit block
3512 and so should never come across fake pred edges. */
3513 gcc_assert (!(pred->flags & EDGE_FAKE));
3514 bprime = pred->src;
3515 eprime = phi_translate (expr, ANTIC_IN (block),
3516 PA_IN (block),
3517 bprime, block);
3519 /* eprime will generally only be NULL if the
3520 value of the expression, translated
3521 through the PHI for this predecessor, is
3522 undefined. If that is the case, we can't
3523 make the expression fully redundant,
3524 because its value is undefined along a
3525 predecessor path. We can thus break out
3526 early because it doesn't matter what the
3527 rest of the results are. */
3528 if (eprime == NULL)
3530 avail[pred->dest_idx] = NULL;
3531 cant_insert = true;
3532 break;
3535 eprime = fully_constant_expression (eprime);
3536 vprime = get_expr_value_id (eprime);
3537 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3538 avail[pred->dest_idx] = edoubleprime;
3539 if (edoubleprime == NULL)
3541 by_all = false;
3542 break;
3546 /* If we can insert it and its value is already
3547 available along every predecessor, the expression
3548 is fully available there, so a PHI here removes
3549 the later computation. */
3550 if (!cant_insert && by_all)
3552 edge succ;
3553 bool do_insertion = false;
3555 /* Insert only if we can remove a later expression on a path
3556 that we want to optimize for speed.
3557 The phi node that we will be inserting in BLOCK is not free,
3558 and inserting it for the sake of a !optimize_for_speed successor
3559 may cause regressions on the speed path. */
3560 FOR_EACH_EDGE (succ, ei, block->succs)
3562 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3563 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3565 if (optimize_edge_for_speed_p (succ))
3566 do_insertion = true;
3570 if (!do_insertion)
3572 if (dump_file && (dump_flags & TDF_DETAILS))
3574 fprintf (dump_file, "Skipping partial partial redundancy "
3575 "for expression ");
3576 print_pre_expr (dump_file, expr);
3577 fprintf (dump_file, " (%04d), not (partially) anticipated "
3578 "on any to be optimized for speed edges\n", val);
3581 else if (dbg_cnt (treepre_insert))
3583 pre_stats.pa_insert++;
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3586 fprintf (dump_file, "Found partial partial redundancy "
3587 "for expression ");
3588 print_pre_expr (dump_file, expr);
3589 fprintf (dump_file, " (%04d)\n",
3590 get_expr_value_id (expr));
3592 if (insert_into_preds_of_block (block,
3593 get_expression_id (expr),
3594 avail))
3595 new_stuff = true;
3601 exprs.release ();
3602 avail.release ();
3603 return new_stuff;
3606 static bool
3607 insert_aux (basic_block block)
3609 basic_block son;
3610 bool new_stuff = false;
3612 if (block)
3614 basic_block dom;
3615 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3616 if (dom)
3618 unsigned i;
3619 bitmap_iterator bi;
3620 bitmap_set_t newset = NEW_SETS (dom);
3621 if (newset)
3623 /* Note that we need to value_replace both NEW_SETS and
3624 AVAIL_OUT. In both sets, the value may currently be
3625 represented by some non-simple expression that we want
3626 to replace. */
3627 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3629 pre_expr expr = expression_for_id (i);
3630 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3631 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3634 if (!single_pred_p (block))
3636 new_stuff |= do_regular_insertion (block, dom);
3637 if (do_partial_partial)
3638 new_stuff |= do_partial_partial_insertion (block, dom);
3642 for (son = first_dom_son (CDI_DOMINATORS, block);
3643 son;
3644 son = next_dom_son (CDI_DOMINATORS, son))
3646 new_stuff |= insert_aux (son);
3649 return new_stuff;
3652 /* Perform insertion of partially redundant values. */
3654 static void
3655 insert (void)
3657 bool new_stuff = true;
3658 basic_block bb;
3659 int num_iterations = 0;
3661 FOR_ALL_BB (bb)
3662 NEW_SETS (bb) = bitmap_set_new ();
3664 while (new_stuff)
3666 num_iterations++;
3667 if (dump_file && (dump_flags & TDF_DETAILS))
3668 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3669 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3671 /* Clear the NEW sets before the next iteration. We have already
3672 fully propagated their contents. */
3673 if (new_stuff)
3674 FOR_ALL_BB (bb)
3675 bitmap_set_free (NEW_SETS (bb));
3677 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3681 /* Compute the AVAIL set for all basic blocks.
3683 This function performs value numbering of the statements in each basic
3684 block. The AVAIL sets are built from information we glean while doing
3685 this value numbering, since the AVAIL sets contain only one entry per
3686 value.
3688 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3689 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
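/* E.g. (illustrative): if B1 dominates B2 and B1 computes
   t_1 = a_2 + b_3, the value of a+b is in AVAIL_OUT[B1] and, by the
   equations above, also in AVAIL_OUT[B2].  Because these are value
   sets, each value keeps a single leader, here t_1.  */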
3691 static void
3692 compute_avail (void)
3695 basic_block block, son;
3696 basic_block *worklist;
3697 size_t sp = 0;
3698 unsigned i;
3700 /* We pretend that default definitions are defined in the entry block.
3701 This includes function arguments and the static chain decl. */
3702 for (i = 1; i < num_ssa_names; ++i)
3704 tree name = ssa_name (i);
3705 pre_expr e;
3706 if (!name
3707 || !SSA_NAME_IS_DEFAULT_DEF (name)
3708 || has_zero_uses (name)
3709 || virtual_operand_p (name))
3710 continue;
3712 e = get_or_alloc_expr_for_name (name);
3713 add_to_value (get_expr_value_id (e), e);
3714 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3715 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3718 if (dump_file && (dump_flags & TDF_DETAILS))
3720 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR),
3721 "tmp_gen", ENTRY_BLOCK);
3722 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR),
3723 "avail_out", ENTRY_BLOCK);
3726 /* Allocate the worklist. */
3727 worklist = XNEWVEC (basic_block, n_basic_blocks);
3729 /* Seed the algorithm by putting the dominator children of the entry
3730 block on the worklist. */
3731 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3732 son;
3733 son = next_dom_son (CDI_DOMINATORS, son))
3734 worklist[sp++] = son;
3736 /* Loop until the worklist is empty. */
3737 while (sp)
3739 gimple_stmt_iterator gsi;
3740 gimple stmt;
3741 basic_block dom;
3743 /* Pick a block from the worklist. */
3744 block = worklist[--sp];
3746 /* Initially, the set of available values in BLOCK is that of
3747 its immediate dominator. */
3748 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3749 if (dom)
3750 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3752 /* Generate values for PHI nodes. */
3753 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3755 tree result = gimple_phi_result (gsi_stmt (gsi));
3757 /* We have no need for virtual phis, as they don't represent
3758 actual computations. */
3759 if (virtual_operand_p (result))
3760 continue;
3762 pre_expr e = get_or_alloc_expr_for_name (result);
3763 add_to_value (get_expr_value_id (e), e);
3764 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3765 bitmap_insert_into_set (PHI_GEN (block), e);
3768 BB_MAY_NOTRETURN (block) = 0;
3770 /* Now compute value numbers and populate value sets with all
3771 the expressions computed in BLOCK. */
3772 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3774 ssa_op_iter iter;
3775 tree op;
3777 stmt = gsi_stmt (gsi);
3779 /* Cache whether the basic-block has any non-visible side-effect
3780 or control flow.
3781 If this isn't a call or it is the last stmt in the
3782 basic-block, then the CFG represents things correctly. */
3783 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3785 /* Non-looping const functions always return normally.
3786 Otherwise the call might not return or have side-effects
3787 that forbid hoisting possibly trapping expressions
3788 before it. */
3789 int flags = gimple_call_flags (stmt);
3790 if (!(flags & ECF_CONST)
3791 || (flags & ECF_LOOPING_CONST_OR_PURE))
3792 BB_MAY_NOTRETURN (block) = 1;
3795 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3797 pre_expr e = get_or_alloc_expr_for_name (op);
3799 add_to_value (get_expr_value_id (e), e);
3800 bitmap_insert_into_set (TMP_GEN (block), e);
3801 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3804 if (gimple_has_side_effects (stmt)
3805 || stmt_could_throw_p (stmt)
3806 || is_gimple_debug (stmt))
3807 continue;
3809 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3811 if (ssa_undefined_value_p (op))
3812 continue;
3813 pre_expr e = get_or_alloc_expr_for_name (op);
3814 bitmap_value_insert_into_set (EXP_GEN (block), e);
3817 switch (gimple_code (stmt))
3819 case GIMPLE_RETURN:
3820 continue;
3822 case GIMPLE_CALL:
3824 vn_reference_t ref;
3825 pre_expr result = NULL;
3826 vec<vn_reference_op_s> ops = vNULL;
3828 /* We can value number only calls to real functions. */
3829 if (gimple_call_internal_p (stmt))
3830 continue;
3832 copy_reference_ops_from_call (stmt, &ops);
3833 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3834 gimple_expr_type (stmt),
3835 ops, &ref, VN_NOWALK);
3836 ops.release ();
3837 if (!ref)
3838 continue;
3840 /* If the value of the call is not invalidated in
3841 this block until it is computed, add the expression
3842 to EXP_GEN. */
3843 if (!gimple_vuse (stmt)
3844 || gimple_code
3845 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3846 || gimple_bb (SSA_NAME_DEF_STMT
3847 (gimple_vuse (stmt))) != block)
3849 result = (pre_expr) pool_alloc (pre_expr_pool);
3850 result->kind = REFERENCE;
3851 result->id = 0;
3852 PRE_EXPR_REFERENCE (result) = ref;
3854 get_or_alloc_expression_id (result);
3855 add_to_value (get_expr_value_id (result), result);
3856 bitmap_value_insert_into_set (EXP_GEN (block), result);
3858 continue;
3861 case GIMPLE_ASSIGN:
3863 pre_expr result = NULL;
3864 switch (vn_get_stmt_kind (stmt))
3866 case VN_NARY:
3868 enum tree_code code = gimple_assign_rhs_code (stmt);
3869 vn_nary_op_t nary;
3871 /* COND_EXPR and VEC_COND_EXPR are awkward in
3872 that they contain an embedded complex expression.
3873 Don't even try to shove those through PRE. */
3874 if (code == COND_EXPR
3875 || code == VEC_COND_EXPR)
3876 continue;
3878 vn_nary_op_lookup_stmt (stmt, &nary);
3879 if (!nary)
3880 continue;
3882 /* If the NARY traps and there was a preceding
3883 point in the block that might not return, avoid
3884 adding the nary to EXP_GEN. */
3885 if (BB_MAY_NOTRETURN (block)
3886 && vn_nary_may_trap (nary))
3887 continue;
3889 result = (pre_expr) pool_alloc (pre_expr_pool);
3890 result->kind = NARY;
3891 result->id = 0;
3892 PRE_EXPR_NARY (result) = nary;
3893 break;
3896 case VN_REFERENCE:
3898 vn_reference_t ref;
3899 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3900 gimple_vuse (stmt),
3901 VN_WALK, &ref);
3902 if (!ref)
3903 continue;
3905 /* If the value of the reference is not invalidated in
3906 this block until it is computed, add the expression
3907 to EXP_GEN. */
3908 if (gimple_vuse (stmt))
3910 gimple def_stmt;
3911 bool ok = true;
3912 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3913 while (!gimple_nop_p (def_stmt)
3914 && gimple_code (def_stmt) != GIMPLE_PHI
3915 && gimple_bb (def_stmt) == block)
3917 if (stmt_may_clobber_ref_p
3918 (def_stmt, gimple_assign_rhs1 (stmt)))
3920 ok = false;
3921 break;
3923 def_stmt
3924 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3926 if (!ok)
3927 continue;
3930 result = (pre_expr) pool_alloc (pre_expr_pool);
3931 result->kind = REFERENCE;
3932 result->id = 0;
3933 PRE_EXPR_REFERENCE (result) = ref;
3934 break;
3937 default:
3938 continue;
3941 get_or_alloc_expression_id (result);
3942 add_to_value (get_expr_value_id (result), result);
3943 bitmap_value_insert_into_set (EXP_GEN (block), result);
3944 continue;
3946 default:
3947 break;
3951 if (dump_file && (dump_flags & TDF_DETAILS))
3953 print_bitmap_set (dump_file, EXP_GEN (block),
3954 "exp_gen", block->index);
3955 print_bitmap_set (dump_file, PHI_GEN (block),
3956 "phi_gen", block->index);
3957 print_bitmap_set (dump_file, TMP_GEN (block),
3958 "tmp_gen", block->index);
3959 print_bitmap_set (dump_file, AVAIL_OUT (block),
3960 "avail_out", block->index);
3963 /* Put the dominator children of BLOCK on the worklist of blocks
3964 to compute available sets for. */
3965 for (son = first_dom_son (CDI_DOMINATORS, block);
3966 son;
3967 son = next_dom_son (CDI_DOMINATORS, son))
3968 worklist[sp++] = son;
3971 free (worklist);
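/* For example (SSA names purely illustrative), a block containing
     a_1 = b_2 + c_3;
   has the uses b_2 and c_3 and the value-numbered expression
   b_2 + c_3 added to its EXP_GEN set by the walk above. */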
3975 /* Local state for the eliminate domwalk. */
3976 static vec<gimple> el_to_remove;
3977 static vec<gimple> el_to_update;
3978 static unsigned int el_todo;
3979 static vec<tree> el_avail;
3980 static vec<tree> el_avail_stack;
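/* el_avail is indexed by the SSA version of a value number and holds
   the current leader for that value; el_avail_stack records the
   leaders pushed in each block, separated by NULL_TREE markers, so
   leaving a block during the walk can unwind exactly its entries. */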
3982 /* Return a leader for OP that is available at the current point of the
3983 eliminate domwalk. */
3985 static tree
3986 eliminate_avail (tree op)
3988 tree valnum = VN_INFO (op)->valnum;
3989 if (TREE_CODE (valnum) == SSA_NAME)
3991 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3992 return valnum;
3993 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3994 return el_avail[SSA_NAME_VERSION (valnum)];
3996 else if (is_gimple_min_invariant (valnum))
3997 return valnum;
3998 return NULL_TREE;
4001 /* At the current point of the eliminate domwalk make OP available. */
4003 static void
4004 eliminate_push_avail (tree op)
4006 tree valnum = VN_INFO (op)->valnum;
4007 if (TREE_CODE (valnum) == SSA_NAME)
4009 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
4010 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4011 el_avail[SSA_NAME_VERSION (valnum)] = op;
4012 el_avail_stack.safe_push (op);
4016 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4017 the leader for the expression if insertion was successful. */
4019 static tree
4020 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
4022 tree expr = vn_get_expr_for (val);
4023 if (!CONVERT_EXPR_P (expr)
4024 && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
4025 return NULL_TREE;
4027 tree op = TREE_OPERAND (expr, 0);
4028 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
4029 if (!leader)
4030 return NULL_TREE;
4032 tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
4033 gimple tem = gimple_build_assign (res,
4034 fold_build1 (TREE_CODE (expr),
4035 TREE_TYPE (expr), leader));
4036 gsi_insert_before (gsi, tem, GSI_SAME_STMT);
4037 VN_INFO_GET (res)->valnum = val;
4039 if (TREE_CODE (leader) == SSA_NAME)
4040 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
4042 pre_stats.insertions++;
4043 if (dump_file && (dump_flags & TDF_DETAILS))
4045 fprintf (dump_file, "Inserted ");
4046 print_gimple_stmt (dump_file, tem, 0, 0);
4049 return res;
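/* Illustrative sketch of eliminate_insert: if SCCVN recorded the
   conversion (int) y_1 as the expression for VAL and y_1 has an
   available leader, a statement like
     pretmp_2 = (int) y_1;
   is inserted before *GSI and pretmp_2 is returned as the new
   leader.  (The SSA names are made up for the example.) */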
4052 class eliminate_dom_walker : public dom_walker
4054 public:
4055 eliminate_dom_walker (cdi_direction direction) : dom_walker (direction) {}
4057 virtual void before_dom_children (basic_block);
4058 virtual void after_dom_children (basic_block);
4061 /* Perform elimination for the basic-block B during the domwalk. */
4063 void
4064 eliminate_dom_walker::before_dom_children (basic_block b)
4066 gimple_stmt_iterator gsi;
4067 gimple stmt;
4069 /* Mark the start of a new block with a NULL_TREE sentinel on the avail stack. */
4070 el_avail_stack.safe_push (NULL_TREE);
4072 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4074 gimple stmt, phi = gsi_stmt (gsi);
4075 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4076 gimple_stmt_iterator gsi2;
4078 /* We want to perform redundant PHI elimination. Do so by
4079 replacing the PHI with a single copy if possible.
4080 Do not touch inserted, single-argument, or virtual PHIs. */
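/* For example (names illustrative), x_4 = PHI <y_1(2), y_1(3)>
   value-numbers to y_1; the PHI is removed and replaced by the
   copy x_4 = y_1 inserted after the labels of the block. */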
4081 if (gimple_phi_num_args (phi) == 1
4082 || virtual_operand_p (res))
4084 gsi_next (&gsi);
4085 continue;
4088 sprime = eliminate_avail (res);
4089 if (!sprime
4090 || sprime == res)
4092 eliminate_push_avail (res);
4093 gsi_next (&gsi);
4094 continue;
4096 else if (is_gimple_min_invariant (sprime))
4098 if (!useless_type_conversion_p (TREE_TYPE (res),
4099 TREE_TYPE (sprime)))
4100 sprime = fold_convert (TREE_TYPE (res), sprime);
4103 if (dump_file && (dump_flags & TDF_DETAILS))
4105 fprintf (dump_file, "Replaced redundant PHI node defining ");
4106 print_generic_expr (dump_file, res, 0);
4107 fprintf (dump_file, " with ");
4108 print_generic_expr (dump_file, sprime, 0);
4109 fprintf (dump_file, "\n");
4112 remove_phi_node (&gsi, false);
4114 if (inserted_exprs
4115 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4116 && TREE_CODE (sprime) == SSA_NAME)
4117 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4119 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4120 sprime = fold_convert (TREE_TYPE (res), sprime);
4121 stmt = gimple_build_assign (res, sprime);
4122 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4124 gsi2 = gsi_after_labels (b);
4125 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4126 /* Queue the copy for eventual removal. */
4127 el_to_remove.safe_push (stmt);
4128 /* If we inserted this PHI node ourselves, it's not an elimination. */
4129 if (inserted_exprs
4130 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4131 pre_stats.phis--;
4132 else
4133 pre_stats.eliminations++;
4136 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4138 tree lhs = NULL_TREE;
4139 tree rhs = NULL_TREE;
4141 stmt = gsi_stmt (gsi);
4143 if (gimple_has_lhs (stmt))
4144 lhs = gimple_get_lhs (stmt);
4146 if (gimple_assign_single_p (stmt))
4147 rhs = gimple_assign_rhs1 (stmt);
4149 /* Lookup the RHS of the expression, see if we have an
4150 available computation for it. If so, replace the RHS with
4151 the available computation. */
4152 if (gimple_has_lhs (stmt)
4153 && TREE_CODE (lhs) == SSA_NAME
4154 && !gimple_has_volatile_ops (stmt))
4156 tree sprime;
4157 gimple orig_stmt = stmt;
4159 sprime = eliminate_avail (lhs);
4160 /* If there is no usable leader, mark lhs as leader for its value. */
4161 if (!sprime)
4162 eliminate_push_avail (lhs);
4164 /* See PR43491. Do not replace a global register variable when
4165 it is the RHS of an assignment. Do replace local register
4166 variables since gcc does not guarantee a local variable will
4167 be allocated in a register.
4168 Do not perform copy propagation or undo constant propagation. */
4169 if (gimple_assign_single_p (stmt)
4170 && (TREE_CODE (rhs) == SSA_NAME
4171 || is_gimple_min_invariant (rhs)
4172 || (TREE_CODE (rhs) == VAR_DECL
4173 && is_global_var (rhs)
4174 && DECL_HARD_REGISTER (rhs))))
4175 continue;
4177 if (!sprime)
4179 /* If there is no existing usable leader but SCCVN thinks
4180 it has an expression it wants to use as replacement,
4181 insert that. */
4182 tree val = VN_INFO (lhs)->valnum;
4183 if (val != VN_TOP
4184 && TREE_CODE (val) == SSA_NAME
4185 && VN_INFO (val)->needs_insertion
4186 && VN_INFO (val)->expr != NULL_TREE
4187 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4188 eliminate_push_avail (sprime);
4190 else if (is_gimple_min_invariant (sprime))
4192 /* If there is no existing leader but SCCVN knows this
4193 value is constant, use that constant. */
4194 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4195 TREE_TYPE (sprime)))
4196 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4198 if (dump_file && (dump_flags & TDF_DETAILS))
4200 fprintf (dump_file, "Replaced ");
4201 print_gimple_expr (dump_file, stmt, 0, 0);
4202 fprintf (dump_file, " with ");
4203 print_generic_expr (dump_file, sprime, 0);
4204 fprintf (dump_file, " in ");
4205 print_gimple_stmt (dump_file, stmt, 0, 0);
4207 pre_stats.eliminations++;
4208 propagate_tree_value_into_stmt (&gsi, sprime);
4209 stmt = gsi_stmt (gsi);
4210 update_stmt (stmt);
4212 /* If we removed EH side-effects from the statement, clean
4213 its EH information. */
4214 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4216 bitmap_set_bit (need_eh_cleanup,
4217 gimple_bb (stmt)->index);
4218 if (dump_file && (dump_flags & TDF_DETAILS))
4219 fprintf (dump_file, " Removed EH side-effects.\n");
4221 continue;
4224 if (sprime
4225 && sprime != lhs
4226 && (rhs == NULL_TREE
4227 || TREE_CODE (rhs) != SSA_NAME
4228 || may_propagate_copy (rhs, sprime)))
4230 bool can_make_abnormal_goto
4231 = is_gimple_call (stmt)
4232 && stmt_can_make_abnormal_goto (stmt);
4234 gcc_assert (sprime != rhs);
4236 if (dump_file && (dump_flags & TDF_DETAILS))
4238 fprintf (dump_file, "Replaced ");
4239 print_gimple_expr (dump_file, stmt, 0, 0);
4240 fprintf (dump_file, " with ");
4241 print_generic_expr (dump_file, sprime, 0);
4242 fprintf (dump_file, " in ");
4243 print_gimple_stmt (dump_file, stmt, 0, 0);
4246 if (TREE_CODE (sprime) == SSA_NAME)
4247 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4248 NECESSARY, true);
4249 /* We need to make sure the new and old types actually match,
4250 which may require adding a simple cast, which fold_convert
4251 will do for us. */
4252 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4253 && !useless_type_conversion_p (gimple_expr_type (stmt),
4254 TREE_TYPE (sprime)))
4255 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4257 pre_stats.eliminations++;
4258 propagate_tree_value_into_stmt (&gsi, sprime);
4259 stmt = gsi_stmt (gsi);
4260 update_stmt (stmt);
4262 /* If we removed EH side-effects from the statement, clean
4263 its EH information. */
4264 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4266 bitmap_set_bit (need_eh_cleanup,
4267 gimple_bb (stmt)->index);
4268 if (dump_file && (dump_flags & TDF_DETAILS))
4269 fprintf (dump_file, " Removed EH side-effects.\n");
4272 /* Likewise for AB side-effects. */
4273 if (can_make_abnormal_goto
4274 && !stmt_can_make_abnormal_goto (stmt))
4276 bitmap_set_bit (need_ab_cleanup,
4277 gimple_bb (stmt)->index);
4278 if (dump_file && (dump_flags & TDF_DETAILS))
4279 fprintf (dump_file, " Removed AB side-effects.\n");
4283 /* If the statement is a scalar store, see if the expression
4284 has the same value number as its rhs. If so, the store is
4285 dead. */
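/* E.g. (illustrative) for *p_1 = x_2 where looking up *p_1 at the
   statement's VUSE already yields the value of x_2, the store writes
   back what is already there and is queued for removal. */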
4286 else if (gimple_assign_single_p (stmt)
4287 && !gimple_has_volatile_ops (stmt)
4288 && !is_gimple_reg (gimple_assign_lhs (stmt))
4289 && (TREE_CODE (rhs) == SSA_NAME
4290 || is_gimple_min_invariant (rhs)))
4292 tree val;
4293 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4294 gimple_vuse (stmt), VN_WALK, NULL);
4295 if (TREE_CODE (rhs) == SSA_NAME)
4296 rhs = VN_INFO (rhs)->valnum;
4297 if (val
4298 && operand_equal_p (val, rhs, 0))
4300 if (dump_file && (dump_flags & TDF_DETAILS))
4302 fprintf (dump_file, "Deleted redundant store ");
4303 print_gimple_stmt (dump_file, stmt, 0, 0);
4306 /* Queue stmt for removal. */
4307 el_to_remove.safe_push (stmt);
4310 /* Visit COND_EXPRs and fold the comparison with the
4311 available value-numbers. */
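/* E.g. (illustrative) if op0's value number is the constant 5, a
   condition like if (op0 > 3) folds to true; the GIMPLE_COND is made
   static and a CFG cleanup is scheduled via el_todo. */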
4312 else if (gimple_code (stmt) == GIMPLE_COND)
4314 tree op0 = gimple_cond_lhs (stmt);
4315 tree op1 = gimple_cond_rhs (stmt);
4316 tree result;
4318 if (TREE_CODE (op0) == SSA_NAME)
4319 op0 = VN_INFO (op0)->valnum;
4320 if (TREE_CODE (op1) == SSA_NAME)
4321 op1 = VN_INFO (op1)->valnum;
4322 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4323 op0, op1);
4324 if (result && TREE_CODE (result) == INTEGER_CST)
4326 if (integer_zerop (result))
4327 gimple_cond_make_false (stmt);
4328 else
4329 gimple_cond_make_true (stmt);
4330 update_stmt (stmt);
4331 el_todo |= TODO_cleanup_cfg;
4334 /* Visit indirect calls and turn them into direct calls if
4335 possible. */
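/* E.g. (illustrative) a call through a pointer fn_1 whose value
   number is &foo is rewritten into a direct call to foo, provided
   the function types are compatible. */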
4336 if (is_gimple_call (stmt))
4338 tree orig_fn = gimple_call_fn (stmt);
4339 tree fn;
4340 if (!orig_fn)
4341 continue;
4342 if (TREE_CODE (orig_fn) == SSA_NAME)
4343 fn = VN_INFO (orig_fn)->valnum;
4344 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4345 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4347 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4348 if (!gimple_call_addr_fndecl (fn))
4350 fn = ipa_intraprocedural_devirtualization (stmt);
4351 if (fn)
4352 fn = build_fold_addr_expr (fn);
4355 else
4356 continue;
4357 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4358 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4359 TREE_TYPE (fn)))
4361 bool can_make_abnormal_goto
4362 = stmt_can_make_abnormal_goto (stmt);
4363 bool was_noreturn = gimple_call_noreturn_p (stmt);
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4367 fprintf (dump_file, "Replacing call target with ");
4368 print_generic_expr (dump_file, fn, 0);
4369 fprintf (dump_file, " in ");
4370 print_gimple_stmt (dump_file, stmt, 0, 0);
4373 gimple_call_set_fn (stmt, fn);
4374 el_to_update.safe_push (stmt);
4376 /* When changing a call into a noreturn call, cfg cleanup
4377 is needed to fix up the noreturn call. */
4378 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4379 el_todo |= TODO_cleanup_cfg;
4381 /* If we removed EH side-effects from the statement, clean
4382 its EH information. */
4383 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4385 bitmap_set_bit (need_eh_cleanup,
4386 gimple_bb (stmt)->index);
4387 if (dump_file && (dump_flags & TDF_DETAILS))
4388 fprintf (dump_file, " Removed EH side-effects.\n");
4391 /* Likewise for AB side-effects. */
4392 if (can_make_abnormal_goto
4393 && !stmt_can_make_abnormal_goto (stmt))
4395 bitmap_set_bit (need_ab_cleanup,
4396 gimple_bb (stmt)->index);
4397 if (dump_file && (dump_flags & TDF_DETAILS))
4398 fprintf (dump_file, " Removed AB side-effects.\n");
4401 /* Changing an indirect call to a direct call may
4402 have exposed different semantics. This may
4403 require an SSA update. */
4404 el_todo |= TODO_update_ssa_only_virtuals;
4410 /* Unwind the avail stack: leaders registered in this block are no longer available. */
4412 void
4413 eliminate_dom_walker::after_dom_children (basic_block)
4415 tree entry;
4416 while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4417 el_avail[SSA_NAME_VERSION (VN_INFO (entry)->valnum)] = NULL_TREE;
4420 /* Eliminate fully redundant computations. */
4422 static unsigned int
4423 eliminate (void)
4425 gimple_stmt_iterator gsi;
4426 gimple stmt;
4427 unsigned i;
4429 need_eh_cleanup = BITMAP_ALLOC (NULL);
4430 need_ab_cleanup = BITMAP_ALLOC (NULL);
4432 el_to_remove.create (0);
4433 el_to_update.create (0);
4434 el_todo = 0;
4435 el_avail.create (0);
4436 el_avail_stack.create (0);
4438 eliminate_dom_walker (CDI_DOMINATORS).walk (cfun->cfg->x_entry_block_ptr);
4440 el_avail.release ();
4441 el_avail_stack.release ();
4443 /* We cannot remove stmts during the BB walk, especially not release SSA
4444 names there, as this confuses the VN machinery. The stmts ending
4445 up in el_to_remove are either stores or simple copies. */
4446 FOR_EACH_VEC_ELT (el_to_remove, i, stmt)
4448 tree lhs = gimple_assign_lhs (stmt);
4449 tree rhs = gimple_assign_rhs1 (stmt);
4450 use_operand_p use_p;
4451 gimple use_stmt;
4453 /* If there is a single use only, propagate the equivalency
4454 instead of keeping the copy. */
4455 if (TREE_CODE (lhs) == SSA_NAME
4456 && TREE_CODE (rhs) == SSA_NAME
4457 && single_imm_use (lhs, &use_p, &use_stmt)
4458 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4460 SET_USE (use_p, rhs);
4461 update_stmt (use_stmt);
4462 if (inserted_exprs
4463 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4464 && TREE_CODE (rhs) == SSA_NAME)
4465 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4468 /* If this is a store or a now unused copy, remove it. */
4469 if (TREE_CODE (lhs) != SSA_NAME
4470 || has_zero_uses (lhs))
4472 basic_block bb = gimple_bb (stmt);
4473 gsi = gsi_for_stmt (stmt);
4474 unlink_stmt_vdef (stmt);
4475 if (gsi_remove (&gsi, true))
4476 bitmap_set_bit (need_eh_cleanup, bb->index);
4477 if (inserted_exprs
4478 && TREE_CODE (lhs) == SSA_NAME)
4479 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4480 release_defs (stmt);
4483 el_to_remove.release ();
4485 /* We cannot update call statements with virtual operands during the
4486 SSA walk. Doing so might remove them, which in turn makes our
4487 VN lattice invalid. */
4488 FOR_EACH_VEC_ELT (el_to_update, i, stmt)
4489 update_stmt (stmt);
4490 el_to_update.release ();
4492 return el_todo;
4495 /* Perform CFG cleanups made necessary by elimination. */
4497 static unsigned
4498 fini_eliminate (void)
4500 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4501 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4503 if (do_eh_cleanup)
4504 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4506 if (do_ab_cleanup)
4507 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4509 BITMAP_FREE (need_eh_cleanup);
4510 BITMAP_FREE (need_ab_cleanup);
4512 if (do_eh_cleanup || do_ab_cleanup)
4513 return TODO_cleanup_cfg;
4514 return 0;
4517 /* Borrow a bit of tree-ssa-dce.c for the moment.
4518 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4519 this may be a bit faster, and we may want critical edges kept split. */
4521 /* If OP's defining statement has not already been determined to be necessary,
4522 mark that statement necessary. Return the stmt, if it is newly
4523 necessary. */
4525 static inline gimple
4526 mark_operand_necessary (tree op)
4528 gimple stmt;
4530 gcc_assert (op);
4532 if (TREE_CODE (op) != SSA_NAME)
4533 return NULL;
4535 stmt = SSA_NAME_DEF_STMT (op);
4536 gcc_assert (stmt);
4538 if (gimple_plf (stmt, NECESSARY)
4539 || gimple_nop_p (stmt))
4540 return NULL;
4542 gimple_set_plf (stmt, NECESSARY, true);
4543 return stmt;
4546 /* Because we don't follow the standard PRE algorithm exactly and sometimes
4547 decide not to insert PHI nodes, and because value numbering of casts isn't
4548 perfect, we sometimes end up inserting dead code. This simple DCE-like
4549 pass removes any insertions we made that weren't actually used. */
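/* The implementation below is a simple mark phase: seed a worklist
   with the inserted statements already flagged NECESSARY, propagate
   NECESSARY backwards through PHI arguments and SSA operands, then
   delete the insertions that were never marked. */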
4551 static void
4552 remove_dead_inserted_code (void)
4554 bitmap worklist;
4555 unsigned i;
4556 bitmap_iterator bi;
4557 gimple t;
4559 worklist = BITMAP_ALLOC (NULL);
4560 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4562 t = SSA_NAME_DEF_STMT (ssa_name (i));
4563 if (gimple_plf (t, NECESSARY))
4564 bitmap_set_bit (worklist, i);
4566 while (!bitmap_empty_p (worklist))
4568 i = bitmap_first_set_bit (worklist);
4569 bitmap_clear_bit (worklist, i);
4570 t = SSA_NAME_DEF_STMT (ssa_name (i));
4572 /* PHI nodes are somewhat special in that each PHI alternative has
4573 data and control dependencies. All the statements feeding the
4574 PHI node's arguments are always necessary. */
4575 if (gimple_code (t) == GIMPLE_PHI)
4577 unsigned k;
4579 for (k = 0; k < gimple_phi_num_args (t); k++)
4581 tree arg = PHI_ARG_DEF (t, k);
4582 if (TREE_CODE (arg) == SSA_NAME)
4584 gimple n = mark_operand_necessary (arg);
4585 if (n)
4586 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4590 else
4592 /* Propagate through the operands. Examine all the USE, VUSE and
4593 VDEF operands in this statement. Mark all the statements
4594 which feed this statement's uses as necessary. */
4595 ssa_op_iter iter;
4596 tree use;
4598 /* The operands of VDEF expressions are also needed as they
4599 represent potential definitions that may reach this
4600 statement (VDEF operands allow us to follow def-def
4601 links). */
4603 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4605 gimple n = mark_operand_necessary (use);
4606 if (n)
4607 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4612 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4614 t = SSA_NAME_DEF_STMT (ssa_name (i));
4615 if (!gimple_plf (t, NECESSARY))
4617 gimple_stmt_iterator gsi;
4619 if (dump_file && (dump_flags & TDF_DETAILS))
4621 fprintf (dump_file, "Removing unnecessary insertion:");
4622 print_gimple_stmt (dump_file, t, 0, 0);
4625 gsi = gsi_for_stmt (t);
4626 if (gimple_code (t) == GIMPLE_PHI)
4627 remove_phi_node (&gsi, true);
4628 else
4630 gsi_remove (&gsi, true);
4631 release_defs (t);
4635 BITMAP_FREE (worklist);
4639 /* Initialize data structures used by PRE. */
4641 static void
4642 init_pre (void)
4644 basic_block bb;
4646 next_expression_id = 1;
4647 expressions.create (0);
4648 expressions.safe_push (NULL);
4649 value_expressions.create (get_max_value_id () + 1);
4650 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4651 name_to_id.create (0);
4653 inserted_exprs = BITMAP_ALLOC (NULL);
4655 connect_infinite_loops_to_exit ();
4656 memset (&pre_stats, 0, sizeof (pre_stats));
4658 postorder = XNEWVEC (int, n_basic_blocks);
4659 postorder_num = inverted_post_order_compute (postorder);
4661 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4663 calculate_dominance_info (CDI_POST_DOMINATORS);
4664 calculate_dominance_info (CDI_DOMINATORS);
4666 bitmap_obstack_initialize (&grand_bitmap_obstack);
4667 phi_translate_table.create (5110);
4668 expression_to_id.create (num_ssa_names * 3);
4669 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4670 sizeof (struct bitmap_set), 30);
4671 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4672 sizeof (struct pre_expr_d), 30);
4673 FOR_ALL_BB (bb)
4675 EXP_GEN (bb) = bitmap_set_new ();
4676 PHI_GEN (bb) = bitmap_set_new ();
4677 TMP_GEN (bb) = bitmap_set_new ();
4678 AVAIL_OUT (bb) = bitmap_set_new ();
4683 /* Deallocate data structures used by PRE. */
4685 static void
4686 fini_pre ()
4688 free (postorder);
4689 value_expressions.release ();
4690 BITMAP_FREE (inserted_exprs);
4691 bitmap_obstack_release (&grand_bitmap_obstack);
4692 free_alloc_pool (bitmap_set_pool);
4693 free_alloc_pool (pre_expr_pool);
4694 phi_translate_table.dispose ();
4695 expression_to_id.dispose ();
4696 name_to_id.release ();
4698 free_aux_for_blocks ();
4700 free_dominance_info (CDI_POST_DOMINATORS);
4703 /* Gate and execute functions for PRE. */
4705 static unsigned int
4706 do_pre (void)
4708 unsigned int todo = 0;
4710 do_partial_partial =
4711 flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
4713 /* This has to happen before SCCVN runs because
4714 loop_optimizer_init may create new phis, etc. */
4715 loop_optimizer_init (LOOPS_NORMAL);
4717 if (!run_scc_vn (VN_WALK))
4719 loop_optimizer_finalize ();
4720 return 0;
4723 init_pre ();
4724 scev_initialize ();
4726 /* Collect and value number expressions computed in each basic block. */
4727 compute_avail ();
4729 /* Insert can get quite slow on an incredibly large number of basic
4730 blocks due to some quadratic behavior. Until this behavior is
4731 fixed, don't run it when we have an incredibly large number of
4732 bb's. If we aren't going to run insert, there is no point in
4733 computing ANTIC, either, even though it's plenty fast. */
4734 if (n_basic_blocks < 4000)
4736 compute_antic ();
4737 insert ();
4740 /* Make sure to remove fake edges before committing our inserts.
4741 This makes sure we don't end up with extra critical edges that
4742 we would need to split. */
4743 remove_fake_exit_edges ();
4744 gsi_commit_edge_inserts ();
4746 /* Remove all the redundant expressions. */
4747 todo |= eliminate ();
4749 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4750 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4751 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4752 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4754 clear_expression_ids ();
4755 remove_dead_inserted_code ();
4756 todo |= TODO_verify_flow;
4758 scev_finalize ();
4759 fini_pre ();
4760 todo |= fini_eliminate ();
4761 loop_optimizer_finalize ();
4763 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4764 case we can merge the block with the remaining predecessor of the block.
4765 It should either:
4766 - call merge_blocks after each tail merge iteration
4767 - call merge_blocks after all tail merge iterations
4768 - mark TODO_cleanup_cfg when necessary
4769 - share the cfg cleanup with fini_pre. */
4770 todo |= tail_merge_optimize (todo);
4772 free_scc_vn ();
4774 /* Tail merging invalidates the virtual SSA web; together with
4775 cfg-cleanup opportunities exposed by PRE, this would wreck the
4776 SSA updating machinery. So make sure to run update-ssa
4777 manually, before eventually scheduling cfg-cleanup as part of
4778 the todo. */
4779 update_ssa (TODO_update_ssa_only_virtuals);
4781 return todo;
4784 static bool
4785 gate_pre (void)
4787 return flag_tree_pre != 0;
4790 namespace {
4792 const pass_data pass_data_pre =
4794 GIMPLE_PASS, /* type */
4795 "pre", /* name */
4796 OPTGROUP_NONE, /* optinfo_flags */
4797 true, /* has_gate */
4798 true, /* has_execute */
4799 TV_TREE_PRE, /* tv_id */
4800 ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
4801 0, /* properties_provided */
4802 0, /* properties_destroyed */
4803 TODO_rebuild_alias, /* todo_flags_start */
4804 TODO_verify_ssa, /* todo_flags_finish */
4807 class pass_pre : public gimple_opt_pass
4809 public:
4810 pass_pre (gcc::context *ctxt)
4811 : gimple_opt_pass (pass_data_pre, ctxt)
4814 /* opt_pass methods: */
4815 bool gate () { return gate_pre (); }
4816 unsigned int execute () { return do_pre (); }
4818 }; // class pass_pre
4820 } // anon namespace
4822 gimple_opt_pass *
4823 make_pass_pre (gcc::context *ctxt)
4825 return new pass_pre (ctxt);
4829 /* Gate and execute functions for FRE. */
4831 static unsigned int
4832 execute_fre (void)
4834 unsigned int todo = 0;
4836 if (!run_scc_vn (VN_WALKREWRITE))
4837 return 0;
4839 memset (&pre_stats, 0, sizeof (pre_stats));
4841 /* Remove all the redundant expressions. */
4842 todo |= eliminate ();
4844 todo |= fini_eliminate ();
4846 free_scc_vn ();
4848 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4849 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4851 return todo;
4854 static bool
4855 gate_fre (void)
4857 return flag_tree_fre != 0;
4860 namespace {
4862 const pass_data pass_data_fre =
4864 GIMPLE_PASS, /* type */
4865 "fre", /* name */
4866 OPTGROUP_NONE, /* optinfo_flags */
4867 true, /* has_gate */
4868 true, /* has_execute */
4869 TV_TREE_FRE, /* tv_id */
4870 ( PROP_cfg | PROP_ssa ), /* properties_required */
4871 0, /* properties_provided */
4872 0, /* properties_destroyed */
4873 0, /* todo_flags_start */
4874 TODO_verify_ssa, /* todo_flags_finish */
4877 class pass_fre : public gimple_opt_pass
4879 public:
4880 pass_fre (gcc::context *ctxt)
4881 : gimple_opt_pass (pass_data_fre, ctxt)
4884 /* opt_pass methods: */
4885 opt_pass * clone () { return new pass_fre (m_ctxt); }
4886 bool gate () { return gate_fre (); }
4887 unsigned int execute () { return execute_fre (); }
4889 }; // class pass_fre
4891 } // anon namespace
4893 gimple_opt_pass *
4894 make_pass_fre (gcc::context *ctxt)
4896 return new pass_fre (ctxt);