/* SSA-PRE for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "langhooks.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
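
/* A small illustrative sketch (hypothetical GIMPLE names, not taken
   from a testcase): given

       if (flag_1)
         x_2 = a_3 + b_4;
       <join point>
       y_5 = a_3 + b_4;

   a_3 + b_4 is ANTIC at the join but AVAIL only along the true edge,
   so insertion computes pretmp_6 = a_3 + b_4 on the other edge and
   merges both copies with a PHI; elimination then replaces the
   computation of y_5 by the PHI result, as it is now fully
   redundant.  */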

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as
   two bitmaps, one that keeps track of values present in the set, and
   one that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
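
/* Illustratively: a set holding expressions e1 and e2 that share
   value-id 5, plus e3 with value-id 7, is stored as
   expressions = { id(e1), id(e2), id(e3) } and values = { 5, 7 };
   value membership is a single bit test, and walking the values
   bitmap in increasing value-id order yields the topological order
   used by sorted_array_from_bitmap_set below.  */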

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
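
/* Return the id EXPR was assigned by alloc_expression_id, looking in
   name_to_id for NAMEs and in expression_to_id otherwise, or 0 if
   EXPR has not been assigned an id yet.  */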
static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit

/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}

/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}
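
/* Return true if SET contains the expression EXPR itself (not merely
   its value); EXPR must already have an expression id.  */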
static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }

  gcc_unreachable ();
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = pre_expr_pool.allocate ();
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pre_expr_pool.remove (e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}

      return NULL;
    }
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  return result;
}

/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}


static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary->op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = pre_expr_pool.allocate ();
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		value_expressions.safe_grow_cleared (get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (i, &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		if (!leader)
		  break;
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (!opresult)
		  break;
		if (opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    changed |= name != op[n];
		    op[n] = name;
		  }
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partially
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = pre_expr_pool.allocate ();
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}


/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}

/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
1921 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1922 contains its value-id. */
1924 static bool
1925 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1927 if (op && TREE_CODE (op) == SSA_NAME)
1929 unsigned int value_id = VN_INFO (op)->value_id;
1930 if (!(bitmap_set_contains_value (set1, value_id)
1931 || (set2 && bitmap_set_contains_value (set2, value_id))))
1932 return false;
1934 return true;
1937 /* Determine if the expression EXPR is valid in SET1 U SET2.
1938 ONLY SET2 CAN BE NULL.
1939 This means that we have a leader for each part of the expression
1940 (if it consists of values), or the expression is an SSA_NAME.
1941 For loads/calls, we also see if the vuse is killed in this block. */
1943 static bool
1944 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1946 switch (expr->kind)
1948 case NAME:
1949 /* By construction all NAMEs are available. Non-available
1950 NAMEs are removed by subtracting TMP_GEN from the sets. */
1951 return true;
1952 case NARY:
1954 unsigned int i;
1955 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1956 for (i = 0; i < nary->length; i++)
1957 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1958 return false;
1959 return true;
1961 break;
1962 case REFERENCE:
1964 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1965 vn_reference_op_t vro;
1966 unsigned int i;
1968 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1970 if (!op_valid_in_sets (set1, set2, vro->op0)
1971 || !op_valid_in_sets (set1, set2, vro->op1)
1972 || !op_valid_in_sets (set1, set2, vro->op2))
1973 return false;
1975 return true;
1977 default:
1978 gcc_unreachable ();
1982 /* Clean the set of expressions that are no longer valid in SET1 or
1983 SET2. This means expressions that are made up of values we have no
1984 leaders for in SET1 or SET2. This version is used for partial
1985 anticipation, which means it is not valid in either ANTIC_IN or
1986 PA_IN. */
1988 static void
1989 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
1991 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
1992 pre_expr expr;
1993 int i;
1995 FOR_EACH_VEC_ELT (exprs, i, expr)
1997 if (!valid_in_sets (set1, set2, expr))
1998 bitmap_remove_from_set (set1, expr);
2000 exprs.release ();
2003 /* Clean the set of expressions that are no longer valid in SET. This
2004 means expressions that are made up of values we have no leaders for
2005 in SET. */
2007 static void
2008 clean (bitmap_set_t set)
2010 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2011 pre_expr expr;
2012 int i;
2014 FOR_EACH_VEC_ELT (exprs, i, expr)
2016 if (!valid_in_sets (set, NULL, expr))
2017 bitmap_remove_from_set (set, expr);
2019 exprs.release ();
2022 /* Clean the set of expressions that are no longer valid in SET because
2023 they are clobbered in BLOCK or because they trap and may not be executed. */
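/* For example (illustrative only): with

     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = 1;
     x_4 = *q_5;	<- REFERENCE with VUSE .MEM_2

   a load of *q_5 anticipated from below cannot be kept anticipatable
   across the store unless the alias oracle proves *p_3 and *q_5 do
   not conflict; value_dies_in_block_x answers exactly that question
   for REFERENCEs whose VUSE is defined inside BLOCK.  */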
2025 static void
2026 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2028 bitmap_iterator bi;
2029 unsigned i;
2031 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2033 pre_expr expr = expression_for_id (i);
2034 if (expr->kind == REFERENCE)
2036 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2037 if (ref->vuse)
2039 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2040 if (!gimple_nop_p (def_stmt)
2041 && ((gimple_bb (def_stmt) != block
2042 && !dominated_by_p (CDI_DOMINATORS,
2043 block, gimple_bb (def_stmt)))
2044 || (gimple_bb (def_stmt) == block
2045 && value_dies_in_block_x (expr, block))))
2046 bitmap_remove_from_set (set, expr);
2049 else if (expr->kind == NARY)
2051 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2052 /* If the NARY may trap make sure the block does not contain
2053 a possible exit point.
2054 ??? This is overly conservative if we translate AVAIL_OUT
2055 as the available expression might be after the exit point. */
2056 if (BB_MAY_NOTRETURN (block)
2057 && vn_nary_may_trap (nary))
2058 bitmap_remove_from_set (set, expr);
2063 static sbitmap has_abnormal_preds;
2065 /* List of blocks that may have changed during ANTIC computation and
2066 thus need to be iterated over. */
2068 static sbitmap changed_blocks;
2070 /* Compute the ANTIC set for BLOCK.
2072 If succs(BLOCK) > 1 then
2073 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2074 else if succs(BLOCK) == 1 then
2075 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2077 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
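/* A small worked example (illustrative only): for the diamond

	  B1
	 /  \
	B2   B3
	 \  /
	  B4: x_1 = a + b;

   a + b is in EXP_GEN[B4] and thus in ANTIC_IN[B4].  B2 and B3 each
   have the single successor B4, so ANTIC_OUT[B2] and ANTIC_OUT[B3]
   are phi_translate (ANTIC_IN[B4]), and with neither a nor b
   redefined there a + b also reaches ANTIC_IN[B2] and ANTIC_IN[B3].
   At B1 the two successors' ANTIC_IN sets are intersected, so a + b
   is anticipatable from the top of the diamond.  */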
2080 static bool
2081 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2083 bool changed = false;
2084 bitmap_set_t S, old, ANTIC_OUT;
2085 bitmap_iterator bi;
2086 unsigned int bii;
2087 edge e;
2088 edge_iterator ei;
2090 old = ANTIC_OUT = S = NULL;
2091 BB_VISITED (block) = 1;
2093 /* If any edges from predecessors are abnormal, antic_in is empty,
2094 so do nothing. */
2095 if (block_has_abnormal_pred_edge)
2096 goto maybe_dump_sets;
2098 old = ANTIC_IN (block);
2099 ANTIC_OUT = bitmap_set_new ();
2101 /* If the block has no successors, ANTIC_OUT is empty. */
2102 if (EDGE_COUNT (block->succs) == 0)
2104 /* If we have one successor, we could have some phi nodes to
2105 translate through. */
2106 else if (single_succ_p (block))
2108 basic_block succ_bb = single_succ (block);
2109 gcc_assert (BB_VISITED (succ_bb));
2110 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2112 /* If we have multiple successors, we take the intersection of all of
2113 them. Note that in the case of loop exit phi nodes, we may have
2114 phis to translate through. */
2115 else
2117 size_t i;
2118 basic_block bprime, first = NULL;
2120 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2121 FOR_EACH_EDGE (e, ei, block->succs)
2123 if (!first
2124 && BB_VISITED (e->dest))
2125 first = e->dest;
2126 else if (BB_VISITED (e->dest))
2127 worklist.quick_push (e->dest);
2130 /* With multiple successors we must already have visited at least
2131 one, which is guaranteed by the iteration order. */
2132 gcc_assert (first != NULL);
2134 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2136 FOR_EACH_VEC_ELT (worklist, i, bprime)
2138 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2140 bitmap_set_t tmp = bitmap_set_new ();
2141 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2142 bitmap_set_and (ANTIC_OUT, tmp);
2143 bitmap_set_free (tmp);
2145 else
2146 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2150 /* Prune expressions that are clobbered in block and thus become
2151 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2152 prune_clobbered_mems (ANTIC_OUT, block);
2154 /* Generate ANTIC_OUT - TMP_GEN. */
2155 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2157 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2158 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2159 TMP_GEN (block));
2161 /* Then union in the ANTIC_OUT - TMP_GEN values,
2162 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2163 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2164 bitmap_value_insert_into_set (ANTIC_IN (block),
2165 expression_for_id (bii));
2167 clean (ANTIC_IN (block));
2169 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2171 changed = true;
2172 bitmap_set_bit (changed_blocks, block->index);
2173 FOR_EACH_EDGE (e, ei, block->preds)
2174 bitmap_set_bit (changed_blocks, e->src->index);
2176 else
2177 bitmap_clear_bit (changed_blocks, block->index);
2179 maybe_dump_sets:
2180 if (dump_file && (dump_flags & TDF_DETAILS))
2182 if (ANTIC_OUT)
2183 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2185 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2186 block->index);
2188 if (S)
2189 print_bitmap_set (dump_file, S, "S", block->index);
2191 if (old)
2192 bitmap_set_free (old);
2193 if (S)
2194 bitmap_set_free (S);
2195 if (ANTIC_OUT)
2196 bitmap_set_free (ANTIC_OUT);
2197 return changed;
2200 /* Compute PARTIAL_ANTIC for BLOCK.
2202 If succs(BLOCK) > 1 then
2203 PA_OUT[BLOCK] = value-wise union of PA_IN[b] plus all ANTIC_IN
2204 values not in ANTIC_OUT, for all b in succ(BLOCK)
2205 else if succs(BLOCK) == 1 then
2206 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2208 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2209 - ANTIC_IN[BLOCK])
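/* Contrast with full anticipation, for illustration: in

	  B1
	 /  \
	B2   B3: x_1 = a + b;

   a + b is not in ANTIC_OUT[B1] because B2 never computes it, but it
   is in PA_OUT[B1] since PA_OUT unions the successors' sets.  Such
   expressions are the candidates for the speculative insertions done
   later by do_partial_partial_insertion.  */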
2212 static bool
2213 compute_partial_antic_aux (basic_block block,
2214 bool block_has_abnormal_pred_edge)
2216 bool changed = false;
2217 bitmap_set_t old_PA_IN;
2218 bitmap_set_t PA_OUT;
2219 edge e;
2220 edge_iterator ei;
2221 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2223 old_PA_IN = PA_OUT = NULL;
2225 /* If any edges from predecessors are abnormal, antic_in is empty,
2226 so do nothing. */
2227 if (block_has_abnormal_pred_edge)
2228 goto maybe_dump_sets;
2230 /* If there are too many partially anticipatable values in the
2231 block, phi_translate_set can take an exponential time: stop
2232 before the translation starts. */
2233 if (max_pa
2234 && single_succ_p (block)
2235 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2236 goto maybe_dump_sets;
2238 old_PA_IN = PA_IN (block);
2239 PA_OUT = bitmap_set_new ();
2241 /* If the block has no successors, ANTIC_OUT is empty. */
2242 if (EDGE_COUNT (block->succs) == 0)
2244 /* If we have one successor, we could have some phi nodes to
2245 translate through. Note that we can't phi translate across DFS
2246 back edges in partial antic, because it uses a union operation on
2247 the successors.  For recurrences like IVs, we will end up
2248 generating a new value in the set on each go around (i + 3 (VH.1),
2249 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2250 else if (single_succ_p (block))
2252 basic_block succ = single_succ (block);
2253 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2254 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2256 /* If we have multiple successors, we take the union of all of
2257 them. */
2258 else
2260 size_t i;
2261 basic_block bprime;
2263 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2264 FOR_EACH_EDGE (e, ei, block->succs)
2266 if (e->flags & EDGE_DFS_BACK)
2267 continue;
2268 worklist.quick_push (e->dest);
2270 if (worklist.length () > 0)
2272 FOR_EACH_VEC_ELT (worklist, i, bprime)
2274 unsigned int i;
2275 bitmap_iterator bi;
2277 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2278 bitmap_value_insert_into_set (PA_OUT,
2279 expression_for_id (i));
2280 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2282 bitmap_set_t pa_in = bitmap_set_new ();
2283 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2284 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2285 bitmap_value_insert_into_set (PA_OUT,
2286 expression_for_id (i));
2287 bitmap_set_free (pa_in);
2289 else
2290 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2291 bitmap_value_insert_into_set (PA_OUT,
2292 expression_for_id (i));
2297 /* Prune expressions that are clobbered in block and thus become
2298 invalid if translated from PA_OUT to PA_IN. */
2299 prune_clobbered_mems (PA_OUT, block);
2301 /* PA_IN starts with PA_OUT - TMP_GEN.
2302 Then we subtract things from ANTIC_IN. */
2303 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2305 /* For partial antic, we want to put back in the phi results, since
2306 we will properly avoid making them partially antic over backedges. */
2307 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2308 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2310 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2311 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2313 dependent_clean (PA_IN (block), ANTIC_IN (block));
2315 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2317 changed = true;
2318 bitmap_set_bit (changed_blocks, block->index);
2319 FOR_EACH_EDGE (e, ei, block->preds)
2320 bitmap_set_bit (changed_blocks, e->src->index);
2322 else
2323 bitmap_clear_bit (changed_blocks, block->index);
2325 maybe_dump_sets:
2326 if (dump_file && (dump_flags & TDF_DETAILS))
2328 if (PA_OUT)
2329 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2331 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2333 if (old_PA_IN)
2334 bitmap_set_free (old_PA_IN);
2335 if (PA_OUT)
2336 bitmap_set_free (PA_OUT);
2337 return changed;
2340 /* Compute ANTIC and partial ANTIC sets. */
2342 static void
2343 compute_antic (void)
2345 bool changed = true;
2346 int num_iterations = 0;
2347 basic_block block;
2348 int i;
2350 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2351 We pre-build the map of blocks with incoming abnormal edges here. */
2352 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2353 bitmap_clear (has_abnormal_preds);
2355 FOR_ALL_BB_FN (block, cfun)
2357 edge_iterator ei;
2358 edge e;
2360 FOR_EACH_EDGE (e, ei, block->preds)
2362 e->flags &= ~EDGE_DFS_BACK;
2363 if (e->flags & EDGE_ABNORMAL)
2365 bitmap_set_bit (has_abnormal_preds, block->index);
2366 break;
2370 BB_VISITED (block) = 0;
2372 /* While we are here, give empty ANTIC_IN sets to each block. */
2373 ANTIC_IN (block) = bitmap_set_new ();
2374 PA_IN (block) = bitmap_set_new ();
2377 /* At the exit block we anticipate nothing. */
2378 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2380 changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2381 bitmap_ones (changed_blocks);
2382 while (changed)
2384 if (dump_file && (dump_flags & TDF_DETAILS))
2385 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2386 /* ??? We need to clear our PHI translation cache here as the
2387 ANTIC sets shrink and we restrict valid translations to
2388 those having operands with leaders in ANTIC. Same below
2389 for PA ANTIC computation. */
2390 num_iterations++;
2391 changed = false;
2392 for (i = postorder_num - 1; i >= 0; i--)
2394 if (bitmap_bit_p (changed_blocks, postorder[i]))
2396 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2397 changed |= compute_antic_aux (block,
2398 bitmap_bit_p (has_abnormal_preds,
2399 block->index));
2402 /* Theoretically possible, but *highly* unlikely. */
2403 gcc_checking_assert (num_iterations < 500);
2406 statistics_histogram_event (cfun, "compute_antic iterations",
2407 num_iterations);
2409 if (do_partial_partial)
2411 bitmap_ones (changed_blocks);
2412 mark_dfs_back_edges ();
2413 num_iterations = 0;
2414 changed = true;
2415 while (changed)
2417 if (dump_file && (dump_flags & TDF_DETAILS))
2418 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2419 num_iterations++;
2420 changed = false;
2421 for (i = postorder_num - 1 ; i >= 0; i--)
2423 if (bitmap_bit_p (changed_blocks, postorder[i]))
2425 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2426 changed
2427 |= compute_partial_antic_aux (block,
2428 bitmap_bit_p (has_abnormal_preds,
2429 block->index));
2432 /* Theoretically possible, but *highly* unlikely. */
2433 gcc_checking_assert (num_iterations < 500);
2435 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2436 num_iterations);
2438 sbitmap_free (has_abnormal_preds);
2439 sbitmap_free (changed_blocks);
2443 /* Inserted expressions are placed onto this worklist, which is used
2444 for performing quick dead code elimination of insertions we made
2445 that didn't turn out to be necessary. */
2446 static bitmap inserted_exprs;
2448 /* The actual worker for create_component_ref_by_pieces. */
2450 static tree
2451 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2452 unsigned int *operand, gimple_seq *stmts)
2454 vn_reference_op_t currop = &ref->operands[*operand];
2455 tree genop;
2456 ++*operand;
2457 switch (currop->opcode)
2459 case CALL_EXPR:
2460 gcc_unreachable ();
2462 case MEM_REF:
2464 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2465 stmts);
2466 if (!baseop)
2467 return NULL_TREE;
2468 tree offset = currop->op0;
2469 if (TREE_CODE (baseop) == ADDR_EXPR
2470 && handled_component_p (TREE_OPERAND (baseop, 0)))
2472 HOST_WIDE_INT off;
2473 tree base;
2474 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2475 &off);
2476 gcc_assert (base);
2477 offset = int_const_binop (PLUS_EXPR, offset,
2478 build_int_cst (TREE_TYPE (offset),
2479 off));
2480 baseop = build_fold_addr_expr (base);
2482 genop = build2 (MEM_REF, currop->type, baseop, offset);
2483 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2484 MR_DEPENDENCE_BASE (genop) = currop->base;
2485 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2486 return genop;
2489 case TARGET_MEM_REF:
2491 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2492 vn_reference_op_t nextop = &ref->operands[++*operand];
2493 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2494 stmts);
2495 if (!baseop)
2496 return NULL_TREE;
2497 if (currop->op0)
2499 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2500 if (!genop0)
2501 return NULL_TREE;
2503 if (nextop->op0)
2505 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2506 if (!genop1)
2507 return NULL_TREE;
2509 genop = build5 (TARGET_MEM_REF, currop->type,
2510 baseop, currop->op2, genop0, currop->op1, genop1);
2512 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2513 MR_DEPENDENCE_BASE (genop) = currop->base;
2514 return genop;
2517 case ADDR_EXPR:
2518 if (currop->op0)
2520 gcc_assert (is_gimple_min_invariant (currop->op0));
2521 return currop->op0;
2523 /* Fallthrough. */
2524 case REALPART_EXPR:
2525 case IMAGPART_EXPR:
2526 case VIEW_CONVERT_EXPR:
2528 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2529 stmts);
2530 if (!genop0)
2531 return NULL_TREE;
2532 return fold_build1 (currop->opcode, currop->type, genop0);
2535 case WITH_SIZE_EXPR:
2537 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2538 stmts);
2539 if (!genop0)
2540 return NULL_TREE;
2541 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2542 if (!genop1)
2543 return NULL_TREE;
2544 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2547 case BIT_FIELD_REF:
2549 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2550 stmts);
2551 if (!genop0)
2552 return NULL_TREE;
2553 tree op1 = currop->op0;
2554 tree op2 = currop->op1;
2555 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2556 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2557 return fold (t);
2560 /* For array ref vn_reference_op's, operand 1 of the array ref
2561 is op0 of the reference op and operand 3 of the array ref is
2562 op1. */
2563 case ARRAY_RANGE_REF:
2564 case ARRAY_REF:
2566 tree genop0;
2567 tree genop1 = currop->op0;
2568 tree genop2 = currop->op1;
2569 tree genop3 = currop->op2;
2570 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2571 stmts);
2572 if (!genop0)
2573 return NULL_TREE;
2574 genop1 = find_or_generate_expression (block, genop1, stmts);
2575 if (!genop1)
2576 return NULL_TREE;
2577 if (genop2)
2579 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2580 /* Drop zero minimum index if redundant. */
2581 if (integer_zerop (genop2)
2582 && (!domain_type
2583 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2584 genop2 = NULL_TREE;
2585 else
2587 genop2 = find_or_generate_expression (block, genop2, stmts);
2588 if (!genop2)
2589 return NULL_TREE;
2592 if (genop3)
2594 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2595 /* We can't always put a size in units of the element alignment
2596 here as the element alignment may be not visible. See
2597 PR43783. Simply drop the element size for constant
2598 sizes. */
2599 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2600 genop3 = NULL_TREE;
2601 else
2603 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2604 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2605 genop3 = find_or_generate_expression (block, genop3, stmts);
2606 if (!genop3)
2607 return NULL_TREE;
2610 return build4 (currop->opcode, currop->type, genop0, genop1,
2611 genop2, genop3);
2613 case COMPONENT_REF:
2615 tree op0;
2616 tree op1;
2617 tree genop2 = currop->op1;
2618 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2619 if (!op0)
2620 return NULL_TREE;
2621 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2622 op1 = currop->op0;
2623 if (genop2)
2625 genop2 = find_or_generate_expression (block, genop2, stmts);
2626 if (!genop2)
2627 return NULL_TREE;
2629 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2632 case SSA_NAME:
2634 genop = find_or_generate_expression (block, currop->op0, stmts);
2635 return genop;
2637 case STRING_CST:
2638 case INTEGER_CST:
2639 case COMPLEX_CST:
2640 case VECTOR_CST:
2641 case REAL_CST:
2642 case CONSTRUCTOR:
2643 case VAR_DECL:
2644 case PARM_DECL:
2645 case CONST_DECL:
2646 case RESULT_DECL:
2647 case FUNCTION_DECL:
2648 return currop->op0;
2650 default:
2651 gcc_unreachable ();
2655 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2656 COMPONENT_REF, MEM_REF or ARRAY_REF portion, because we'd end up
2657 trying to rename aggregates into SSA form directly, which is a no-no.
2659 Thus, this routine doesn't create temporaries, it just builds a
2660 single access expression for the array, calling
2661 find_or_generate_expression to build the innermost pieces.
2663 This function is a subroutine of create_expression_by_pieces, and
2664 should not be called on its own unless you really know what you
2665 are doing. */
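/* As an illustration (hypothetical operands): a reference like
   a.f[i_1] is represented by the operand array

     { ARRAY_REF <op0 = i_1>, COMPONENT_REF <op0 = f>, VAR_DECL <op0 = a> }

   and is rebuilt by the recursion above: the ARRAY_REF case first
   recurses to build the inner COMPONENT_REF, which in turn recurses
   to the VAR_DECL, and only leaf operands such as the index i_1 go
   through find_or_generate_expression.  */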
2667 static tree
2668 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2669 gimple_seq *stmts)
2671 unsigned int op = 0;
2672 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2675 /* Find a simple leader for an expression, or generate one using
2676 create_expression_by_pieces from a NARY expression for the value.
2677 BLOCK is the basic_block we are looking for leaders in.
2678 OP is the tree expression to find a leader for or generate.
2679 Returns the leader or NULL_TREE on failure. */
2681 static tree
2682 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2684 pre_expr expr = get_or_alloc_expr_for (op);
2685 unsigned int lookfor = get_expr_value_id (expr);
2686 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2687 if (leader)
2689 if (leader->kind == NAME)
2690 return PRE_EXPR_NAME (leader);
2691 else if (leader->kind == CONSTANT)
2692 return PRE_EXPR_CONSTANT (leader);
2694 /* Defer. */
2695 return NULL_TREE;
2698 /* It must be a complex expression, so generate it recursively. Note
2699 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2700 where the insert algorithm fails to insert a required expression. */
2701 bitmap exprset = value_expressions[lookfor];
2702 bitmap_iterator bi;
2703 unsigned int i;
2704 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2706 pre_expr temp = expression_for_id (i);
2707 /* We cannot insert random REFERENCE expressions at arbitrary
2708 places.  We can insert NARYs, which eventually re-materialize
2709 their operand values. */
2710 if (temp->kind == NARY)
2711 return create_expression_by_pieces (block, temp, stmts,
2712 get_expr_type (expr));
2715 /* Defer. */
2716 return NULL_TREE;
2719 #define NECESSARY GF_PLF_1
2721 /* Create an expression in pieces, so that we can handle very complex
2722 expressions that may be ANTIC, but not necessarily valid GIMPLE.
2723 BLOCK is the basic block the expression will be inserted into,
2724 EXPR is the expression to insert (in value form)
2725 STMTS is a statement list to append the necessary insertions into.
2727 This function will die if we hit some value that shouldn't be
2728 ANTIC but is (i.e. there is no leader for it, or for its components).
2729 The function returns NULL_TREE in case a different antic expression
2730 has to be inserted first.
2731 This function may also generate expressions that are themselves
2732 partially or fully redundant. Those that are will be either made
2733 fully redundant during the next iteration of insert (for partially
2734 redundant ones), or eliminated by eliminate (for fully redundant
2735 ones). */
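/* For example (illustrative only): asked to materialize the NARY
   {PLUS_EXPR, a_1, b_2}, this emits into STMTS something like

     pretmp_3 = a_1 + b_2;

   where a_1 and b_2 are leaders for the operand values found (or
   recursively generated) by find_or_generate_expression, and
   pretmp_3 is given the value-id of EXPR so that later lookups see
   it as a leader.  */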
2737 static tree
2738 create_expression_by_pieces (basic_block block, pre_expr expr,
2739 gimple_seq *stmts, tree type)
2741 tree name;
2742 tree folded;
2743 gimple_seq forced_stmts = NULL;
2744 unsigned int value_id;
2745 gimple_stmt_iterator gsi;
2746 tree exprtype = type ? type : get_expr_type (expr);
2747 pre_expr nameexpr;
2748 gassign *newstmt;
2750 switch (expr->kind)
2752 /* We may hit the NAME/CONSTANT case if we have to convert types
2753 that value numbering saw through. */
2754 case NAME:
2755 folded = PRE_EXPR_NAME (expr);
2756 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2757 return folded;
2758 break;
2759 case CONSTANT:
2761 folded = PRE_EXPR_CONSTANT (expr);
2762 tree tem = fold_convert (exprtype, folded);
2763 if (is_gimple_min_invariant (tem))
2764 return tem;
2765 break;
2767 case REFERENCE:
2768 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2770 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2771 unsigned int operand = 1;
2772 vn_reference_op_t currop = &ref->operands[0];
2773 tree sc = NULL_TREE;
2774 tree fn;
2775 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2776 fn = currop->op0;
2777 else
2778 fn = find_or_generate_expression (block, currop->op0, stmts);
2779 if (!fn)
2780 return NULL_TREE;
2781 if (currop->op1)
2783 sc = find_or_generate_expression (block, currop->op1, stmts);
2784 if (!sc)
2785 return NULL_TREE;
2787 auto_vec<tree> args (ref->operands.length () - 1);
2788 while (operand < ref->operands.length ())
2790 tree arg = create_component_ref_by_pieces_1 (block, ref,
2791 &operand, stmts);
2792 if (!arg)
2793 return NULL_TREE;
2794 args.quick_push (arg);
2796 gcall *call
2797 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2798 ? build_fold_addr_expr (fn) : fn), args);
2799 gimple_call_set_with_bounds (call, currop->with_bounds);
2800 if (sc)
2801 gimple_call_set_chain (call, sc);
2802 tree forcedname = make_ssa_name (currop->type);
2803 gimple_call_set_lhs (call, forcedname);
2804 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2805 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2806 folded = forcedname;
2808 else
2810 folded = create_component_ref_by_pieces (block,
2811 PRE_EXPR_REFERENCE (expr),
2812 stmts);
2813 if (!folded)
2814 return NULL_TREE;
2815 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2816 newstmt = gimple_build_assign (name, folded);
2817 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2818 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2819 folded = name;
2821 break;
2822 case NARY:
2824 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2825 tree *genop = XALLOCAVEC (tree, nary->length);
2826 unsigned i;
2827 for (i = 0; i < nary->length; ++i)
2829 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2830 if (!genop[i])
2831 return NULL_TREE;
2832 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2833 may have conversions stripped. */
2834 if (nary->opcode == POINTER_PLUS_EXPR)
2836 if (i == 0)
2837 genop[i] = gimple_convert (&forced_stmts,
2838 nary->type, genop[i]);
2839 else if (i == 1)
2840 genop[i] = gimple_convert (&forced_stmts,
2841 sizetype, genop[i]);
2843 else
2844 genop[i] = gimple_convert (&forced_stmts,
2845 TREE_TYPE (nary->op[i]), genop[i]);
2847 if (nary->opcode == CONSTRUCTOR)
2849 vec<constructor_elt, va_gc> *elts = NULL;
2850 for (i = 0; i < nary->length; ++i)
2851 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2852 folded = build_constructor (nary->type, elts);
2853 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2854 newstmt = gimple_build_assign (name, folded);
2855 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2856 folded = name;
2858 else
2860 switch (nary->length)
2862 case 1:
2863 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2864 genop[0]);
2865 break;
2866 case 2:
2867 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2868 genop[0], genop[1]);
2869 break;
2870 case 3:
2871 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2872 genop[0], genop[1], genop[2]);
2873 break;
2874 default:
2875 gcc_unreachable ();
2879 break;
2880 default:
2881 gcc_unreachable ();
2884 folded = gimple_convert (&forced_stmts, exprtype, folded);
2886 /* If there is nothing to insert, return the simplified result. */
2887 if (gimple_seq_empty_p (forced_stmts))
2888 return folded;
2889 /* If we simplified to a constant, return it and discard any
2890 statements we may have built. */
2891 if (is_gimple_min_invariant (folded))
2893 gimple_seq_discard (forced_stmts);
2894 return folded;
2897 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2899 /* If we built any intermediate expressions, add them to the value
2900 sets and chain them into the instruction stream. */
2901 if (forced_stmts)
2903 gsi = gsi_start (forced_stmts);
2904 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2906 gimple *stmt = gsi_stmt (gsi);
2907 tree forcedname = gimple_get_lhs (stmt);
2908 pre_expr nameexpr;
2910 if (forcedname != folded)
2912 VN_INFO_GET (forcedname)->valnum = forcedname;
2913 VN_INFO (forcedname)->value_id = get_next_value_id ();
2914 nameexpr = get_or_alloc_expr_for_name (forcedname);
2915 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2916 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2917 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2920 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2921 gimple_set_plf (stmt, NECESSARY, false);
2923 gimple_seq_add_seq (stmts, forced_stmts);
2926 name = folded;
2928 /* Fold the last statement. */
2929 gsi = gsi_last (*stmts);
2930 if (fold_stmt_inplace (&gsi))
2931 update_stmt (gsi_stmt (gsi));
2933 /* Add a value number to the temporary.
2934 The value may already exist in either NEW_SETS or AVAIL_OUT, because
2935 we are creating the expression by pieces, and this particular piece of
2936 the expression may already have been represented.  There is no harm
2937 in replacing it here. */
2938 value_id = get_expr_value_id (expr);
2939 VN_INFO_GET (name)->value_id = value_id;
2940 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2941 if (VN_INFO (name)->valnum == NULL_TREE)
2942 VN_INFO (name)->valnum = name;
2943 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2944 nameexpr = get_or_alloc_expr_for_name (name);
2945 add_to_value (value_id, nameexpr);
2946 if (NEW_SETS (block))
2947 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2948 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2950 pre_stats.insertions++;
2951 if (dump_file && (dump_flags & TDF_DETAILS))
2953 fprintf (dump_file, "Inserted ");
2954 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0, 0);
2955 fprintf (dump_file, " in predecessor %d (%04d)\n",
2956 block->index, value_id);
2959 return name;
2963 /* Insert the to-be-made-available values of expression EXPRNUM for each
2964 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2965 merge the result with a phi node, given the same value number as
2966 the expression.  Return true if we have inserted new stuff. */
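/* A classic partially redundant expression, for illustration:

     if (cond)
       x_1 = a + b;
     ...
     y_2 = a + b;	<- redundant only along the then-path

   becomes, after inserting into the else-predecessor and merging:

     if (cond)
       x_1 = a + b;
     else
       pretmp_3 = a + b;		   <- inserted on the edge
     prephitmp_4 = PHI <x_1, pretmp_3>
     y_2 = prephitmp_4;			   <- now fully redundant

   AVAIL holds, per incoming edge, the translated expression to make
   available; missing ones are built by create_expression_by_pieces
   on the edge and all are merged by the PHI constructed below.  */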
2968 static bool
2969 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2970 vec<pre_expr> avail)
2972 pre_expr expr = expression_for_id (exprnum);
2973 pre_expr newphi;
2974 unsigned int val = get_expr_value_id (expr);
2975 edge pred;
2976 bool insertions = false;
2977 bool nophi = false;
2978 basic_block bprime;
2979 pre_expr eprime;
2980 edge_iterator ei;
2981 tree type = get_expr_type (expr);
2982 tree temp;
2983 gphi *phi;
2985 /* Make sure we aren't creating an induction variable. */
2986 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2988 bool firstinsideloop = false;
2989 bool secondinsideloop = false;
2990 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2991 EDGE_PRED (block, 0)->src);
2992 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2993 EDGE_PRED (block, 1)->src);
2994 /* Induction variables only have one edge inside the loop. */
2995 if ((firstinsideloop ^ secondinsideloop)
2996 && expr->kind != REFERENCE)
2998 if (dump_file && (dump_flags & TDF_DETAILS))
2999 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3000 nophi = true;
3004 /* Make the necessary insertions. */
3005 FOR_EACH_EDGE (pred, ei, block->preds)
3007 gimple_seq stmts = NULL;
3008 tree builtexpr;
3009 bprime = pred->src;
3010 eprime = avail[pred->dest_idx];
3011 builtexpr = create_expression_by_pieces (bprime, eprime,
3012 &stmts, type);
3013 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3014 if (!gimple_seq_empty_p (stmts))
3016 gsi_insert_seq_on_edge (pred, stmts);
3017 insertions = true;
3019 if (!builtexpr)
3021 /* We cannot insert a PHI node if we failed to insert
3022 on one edge. */
3023 nophi = true;
3024 continue;
3026 if (is_gimple_min_invariant (builtexpr))
3027 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3028 else
3029 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3031 /* If we didn't want a phi node, and we made insertions, we still have
3032 inserted new stuff, and thus return true. If we didn't want a phi node,
3033 and didn't make insertions, we haven't added anything new, so return
3034 false. */
3035 if (nophi && insertions)
3036 return true;
3037 else if (nophi && !insertions)
3038 return false;
3040 /* Now build a phi for the new variable. */
3041 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3042 phi = create_phi_node (temp, block);
3044 gimple_set_plf (phi, NECESSARY, false);
3045 VN_INFO_GET (temp)->value_id = val;
3046 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3047 if (VN_INFO (temp)->valnum == NULL_TREE)
3048 VN_INFO (temp)->valnum = temp;
3049 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3050 FOR_EACH_EDGE (pred, ei, block->preds)
3052 pre_expr ae = avail[pred->dest_idx];
3053 gcc_assert (get_expr_type (ae) == type
3054 || useless_type_conversion_p (type, get_expr_type (ae)));
3055 if (ae->kind == CONSTANT)
3056 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3057 pred, UNKNOWN_LOCATION);
3058 else
3059 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3062 newphi = get_or_alloc_expr_for_name (temp);
3063 add_to_value (val, newphi);
3065 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3066 this insertion, since we test for the existence of this value in PHI_GEN
3067 before proceeding with the partial redundancy checks in insert_aux.
3069 The value may exist in AVAIL_OUT, in particular, it could be represented
3070 by the expression we are trying to eliminate, in which case we want the
3071 replacement to occur.  If it does not already exist in AVAIL_OUT, we want it
3072 inserted there.
3074 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3075 this block, because if it did, it would have existed in our dominator's
3076 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3079 bitmap_insert_into_set (PHI_GEN (block), newphi);
3080 bitmap_value_replace_in_set (AVAIL_OUT (block),
3081 newphi);
3082 bitmap_insert_into_set (NEW_SETS (block),
3083 newphi);
3085 /* If we insert a PHI node for a conversion of another PHI node
3086 in the same basic-block try to preserve range information.
3087 This is important so that followup loop passes receive optimal
3088 number of iteration analysis results. See PR61743. */
3089 if (expr->kind == NARY
3090 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3091 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3092 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3093 && INTEGRAL_TYPE_P (type)
3094 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3095 && (TYPE_PRECISION (type)
3096 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3097 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3099 wide_int min, max;
3100 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3101 && !wi::neg_p (min, SIGNED)
3102 && !wi::neg_p (max, SIGNED))
3103 /* Just handle extension and sign-changes of all-positive ranges. */
3104 set_range_info (temp,
3105 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3106 wide_int_storage::from (min, TYPE_PRECISION (type),
3107 TYPE_SIGN (type)),
3108 wide_int_storage::from (max, TYPE_PRECISION (type),
3109 TYPE_SIGN (type)));
3112 if (dump_file && (dump_flags & TDF_DETAILS))
3114 fprintf (dump_file, "Created phi ");
3115 print_gimple_stmt (dump_file, phi, 0, 0);
3116 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3118 pre_stats.phis++;
3119 return true;
3124 /* Perform insertion of partially redundant values.
3125 For BLOCK, do the following:
3126 1. Propagate the NEW_SETS of the dominator into the current block.
3127 If the block has multiple predecessors,
3128 2a. Iterate over the ANTIC expressions for the block to see if
3129 any of them are partially redundant.
3130 2b. If so, insert them into the necessary predecessors to make
3131 the expression fully redundant.
3132 2c. Insert a new PHI merging the values of the predecessors.
3133 2d. Insert the new PHI, and the new expressions, into the
3134 NEW_SETS set.
3135 3. Recursively call ourselves on the dominator children of BLOCK.
3137 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3138 do_regular_insertion and do_partial_insertion.
3142 static bool
3143 do_regular_insertion (basic_block block, basic_block dom)
3145 bool new_stuff = false;
3146 vec<pre_expr> exprs;
3147 pre_expr expr;
3148 auto_vec<pre_expr> avail;
3149 int i;
3151 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3152 avail.safe_grow (EDGE_COUNT (block->preds));
3154 FOR_EACH_VEC_ELT (exprs, i, expr)
3156 if (expr->kind == NARY
3157 || expr->kind == REFERENCE)
3159 unsigned int val;
3160 bool by_some = false;
3161 bool cant_insert = false;
3162 bool all_same = true;
3163 pre_expr first_s = NULL;
3164 edge pred;
3165 basic_block bprime;
3166 pre_expr eprime = NULL;
3167 edge_iterator ei;
3168 pre_expr edoubleprime = NULL;
3169 bool do_insertion = false;
3171 val = get_expr_value_id (expr);
3172 if (bitmap_set_contains_value (PHI_GEN (block), val))
3173 continue;
3174 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3176 if (dump_file && (dump_flags & TDF_DETAILS))
3178 fprintf (dump_file, "Found fully redundant value: ");
3179 print_pre_expr (dump_file, expr);
3180 fprintf (dump_file, "\n");
3182 continue;
3185 FOR_EACH_EDGE (pred, ei, block->preds)
3187 unsigned int vprime;
3189 /* We should never run insertion for the exit block
3190 and so should never come across fake pred edges. */
3191 gcc_assert (!(pred->flags & EDGE_FAKE));
3192 bprime = pred->src;
3193 /* We are looking at ANTIC_OUT of bprime. */
3194 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3195 bprime, block);
3197 /* eprime will generally only be NULL if the
3198 value of the expression, translated
3199 through the PHI for this predecessor, is
3200 undefined. If that is the case, we can't
3201 make the expression fully redundant,
3202 because its value is undefined along a
3203 predecessor path. We can thus break out
3204 early because it doesn't matter what the
3205 rest of the results are. */
3206 if (eprime == NULL)
3208 avail[pred->dest_idx] = NULL;
3209 cant_insert = true;
3210 break;
3213 eprime = fully_constant_expression (eprime);
3214 vprime = get_expr_value_id (eprime);
3215 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3216 vprime);
3217 if (edoubleprime == NULL)
3219 avail[pred->dest_idx] = eprime;
3220 all_same = false;
3222 else
3224 avail[pred->dest_idx] = edoubleprime;
3225 by_some = true;
3226 /* We want to perform insertions to remove a redundancy on
3227 a path in the CFG we want to optimize for speed. */
3228 if (optimize_edge_for_speed_p (pred))
3229 do_insertion = true;
3230 if (first_s == NULL)
3231 first_s = edoubleprime;
3232 else if (!pre_expr_d::equal (first_s, edoubleprime))
3233 all_same = false;
3236 /* If we can insert it, it is not the same value
3237 already existing along every predecessor, and
3238 it is defined by some predecessor, then it is
3239 partially redundant. */
3240 if (!cant_insert && !all_same && by_some)
3242 if (!do_insertion)
3244 if (dump_file && (dump_flags & TDF_DETAILS))
3246 fprintf (dump_file, "Skipping partial redundancy for "
3247 "expression ");
3248 print_pre_expr (dump_file, expr);
3249 fprintf (dump_file, " (%04d), no redundancy on to be "
3250 "optimized for speed edge\n", val);
3253 else if (dbg_cnt (treepre_insert))
3255 if (dump_file && (dump_flags & TDF_DETAILS))
3257 fprintf (dump_file, "Found partial redundancy for "
3258 "expression ");
3259 print_pre_expr (dump_file, expr);
3260 fprintf (dump_file, " (%04d)\n",
3261 get_expr_value_id (expr));
3263 if (insert_into_preds_of_block (block,
3264 get_expression_id (expr),
3265 avail))
3266 new_stuff = true;
3269 /* If all edges produce the same value and that value is
3270 an invariant, then the PHI has the same value on all
3271 edges. Note this. */
3272 else if (!cant_insert && all_same)
3274 gcc_assert (edoubleprime->kind == CONSTANT
3275 || edoubleprime->kind == NAME);
3277 tree temp = make_temp_ssa_name (get_expr_type (expr),
3278 NULL, "pretmp");
3279 gassign *assign
3280 = gimple_build_assign (temp,
3281 edoubleprime->kind == CONSTANT ?
3282 PRE_EXPR_CONSTANT (edoubleprime) :
3283 PRE_EXPR_NAME (edoubleprime));
3284 gimple_stmt_iterator gsi = gsi_after_labels (block);
3285 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3287 gimple_set_plf (assign, NECESSARY, false);
3288 VN_INFO_GET (temp)->value_id = val;
3289 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3290 if (VN_INFO (temp)->valnum == NULL_TREE)
3291 VN_INFO (temp)->valnum = temp;
3292 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3293 pre_expr newe = get_or_alloc_expr_for_name (temp);
3294 add_to_value (val, newe);
3295 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3296 bitmap_insert_into_set (NEW_SETS (block), newe);
3301 exprs.release ();
3302 return new_stuff;
3306 /* Perform insertion for partially anticipatable expressions. There
3307 is only one case we will perform insertion for these. This case is
3308 if the expression is partially anticipatable, and fully available.
3309 In this case, we know that putting it earlier will enable us to
3310 remove the later computation. */
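/* The one case handled here, for illustration:

     if (cond)
       x_1 = a + b;
     else
       x_2 = a + b;
     <join>		       a + b only partially anticipated here
     if (other)
       y_3 = a + b;

   a + b is not available out of the join's immediate dominator, but
   it is available from both predecessors, so building a
   PHI <x_1, x_2> at the join makes the later y_3 fully redundant.  */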
3313 static bool
3314 do_partial_partial_insertion (basic_block block, basic_block dom)
3316 bool new_stuff = false;
3317 vec<pre_expr> exprs;
3318 pre_expr expr;
3319 auto_vec<pre_expr> avail;
3320 int i;
3322 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3323 avail.safe_grow (EDGE_COUNT (block->preds));
3325 FOR_EACH_VEC_ELT (exprs, i, expr)
3327 if (expr->kind == NARY
3328 || expr->kind == REFERENCE)
3330 unsigned int val;
3331 bool by_all = true;
3332 bool cant_insert = false;
3333 edge pred;
3334 basic_block bprime;
3335 pre_expr eprime = NULL;
3336 edge_iterator ei;
3338 val = get_expr_value_id (expr);
3339 if (bitmap_set_contains_value (PHI_GEN (block), val))
3340 continue;
3341 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3342 continue;
3344 FOR_EACH_EDGE (pred, ei, block->preds)
3346 unsigned int vprime;
3347 pre_expr edoubleprime;
3349 /* We should never run insertion for the exit block
3350 and so should never come across fake pred edges. */
3351 gcc_assert (!(pred->flags & EDGE_FAKE));
3352 bprime = pred->src;
3353 eprime = phi_translate (expr, ANTIC_IN (block),
3354 PA_IN (block),
3355 bprime, block);
3357 /* eprime will generally only be NULL if the
3358 value of the expression, translated
3359 through the PHI for this predecessor, is
3360 undefined. If that is the case, we can't
3361 make the expression fully redundant,
3362 because its value is undefined along a
3363 predecessor path. We can thus break out
3364 early because it doesn't matter what the
3365 rest of the results are. */
3366 if (eprime == NULL)
3368 avail[pred->dest_idx] = NULL;
3369 cant_insert = true;
3370 break;
3373 eprime = fully_constant_expression (eprime);
3374 vprime = get_expr_value_id (eprime);
3375 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3376 avail[pred->dest_idx] = edoubleprime;
3377 if (edoubleprime == NULL)
3379 by_all = false;
3380 break;
3384 /* If we can insert it and its value is already
3385 available along every predecessor, then the
3386 later full computation is redundant and can be
3387 removed by inserting here. */
3388 if (!cant_insert && by_all)
3390 edge succ;
3391 bool do_insertion = false;
3393 /* Insert only if we can remove a later expression on a path
3394 that we want to optimize for speed.
3395 The phi node that we will be inserting in BLOCK is not free,
3396 and inserting it for the sake of !optimize_for_speed successor
3397 may cause regressions on the speed path. */
3398 FOR_EACH_EDGE (succ, ei, block->succs)
3400 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3401 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3403 if (optimize_edge_for_speed_p (succ))
3404 do_insertion = true;
3408 if (!do_insertion)
3410 if (dump_file && (dump_flags & TDF_DETAILS))
3412 fprintf (dump_file, "Skipping partial partial redundancy "
3413 "for expression ");
3414 print_pre_expr (dump_file, expr);
3415 fprintf (dump_file, " (%04d), not (partially) anticipated "
3416 "on any to be optimized for speed edges\n", val);
3419 else if (dbg_cnt (treepre_insert))
3421 pre_stats.pa_insert++;
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3424 fprintf (dump_file, "Found partial partial redundancy "
3425 "for expression ");
3426 print_pre_expr (dump_file, expr);
3427 fprintf (dump_file, " (%04d)\n",
3428 get_expr_value_id (expr));
3430 if (insert_into_preds_of_block (block,
3431 get_expression_id (expr),
3432 avail))
3433 new_stuff = true;
3439 exprs.release ();
3440 return new_stuff;
3443 static bool
3444 insert_aux (basic_block block)
3446 basic_block son;
3447 bool new_stuff = false;
3449 if (block)
3451 basic_block dom;
3452 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3453 if (dom)
3455 unsigned i;
3456 bitmap_iterator bi;
3457 bitmap_set_t newset = NEW_SETS (dom);
3458 if (newset)
3460 /* Note that we need to value_replace both NEW_SETS and
3461 AVAIL_OUT.  In both sets the value may currently be
3462 represented by some non-simple expression that we want
3463 to replace with the new one. */
3464 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3466 pre_expr expr = expression_for_id (i);
3467 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3468 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3471 if (!single_pred_p (block))
3473 new_stuff |= do_regular_insertion (block, dom);
3474 if (do_partial_partial)
3475 new_stuff |= do_partial_partial_insertion (block, dom);
3479 for (son = first_dom_son (CDI_DOMINATORS, block);
3480 son;
3481 son = next_dom_son (CDI_DOMINATORS, son))
3483 new_stuff |= insert_aux (son);
3486 return new_stuff;
3489 /* Perform insertion of partially redundant values. */
3491 static void
3492 insert (void)
3494 bool new_stuff = true;
3495 basic_block bb;
3496 int num_iterations = 0;
3498 FOR_ALL_BB_FN (bb, cfun)
3499 NEW_SETS (bb) = bitmap_set_new ();
3501 while (new_stuff)
3503 num_iterations++;
3504 if (dump_file && dump_flags & TDF_DETAILS)
3505 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3506 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3508 /* Clear the NEW sets before the next iteration. We have already
3509 fully propagated their contents. */
3510 if (new_stuff)
3511 FOR_ALL_BB_FN (bb, cfun)
3512 bitmap_set_free (NEW_SETS (bb));
3514 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3518 /* Compute the AVAIL set for all basic blocks.
3520 This function performs value numbering of the statements in each basic
3521 block. The AVAIL sets are built from information we glean while doing
3522 this value numbering, since the AVAIL sets contain only one entry per
3523 value.
3525 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3526 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
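/* By way of example (hypothetical names): with B1 immediately
   dominating B2,

     B1: a_1 = ...;		TMP_GEN[B1] = { a_1 }
     B2: b_2 = PHI <...>;	PHI_GEN[B2] = { b_2 }

   we get AVAIL_OUT[B1] = { a_1 } and AVAIL_OUT[B2] = { a_1, b_2 }:
   each block starts from its dominator's set and adds the names it
   defines itself, keeping one leader per value.  */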
3528 static void
3529 compute_avail (void)
3532 basic_block block, son;
3533 basic_block *worklist;
3534 size_t sp = 0;
3535 unsigned i;
3537 /* We pretend that default definitions are defined in the entry block.
3538 This includes function arguments and the static chain decl. */
3539 for (i = 1; i < num_ssa_names; ++i)
3541 tree name = ssa_name (i);
3542 pre_expr e;
3543 if (!name
3544 || !SSA_NAME_IS_DEFAULT_DEF (name)
3545 || has_zero_uses (name)
3546 || virtual_operand_p (name))
3547 continue;
3549 e = get_or_alloc_expr_for_name (name);
3550 add_to_value (get_expr_value_id (e), e);
3551 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3552 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3556 if (dump_file && (dump_flags & TDF_DETAILS))
3558 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3559 "tmp_gen", ENTRY_BLOCK);
3560 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3561 "avail_out", ENTRY_BLOCK);
3564 /* Allocate the worklist. */
3565 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3567 /* Seed the algorithm by putting the dominator children of the entry
3568 block on the worklist. */
3569 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3570 son;
3571 son = next_dom_son (CDI_DOMINATORS, son))
3572 worklist[sp++] = son;
3574 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3575 = ssa_default_def (cfun, gimple_vop (cfun));
3577 /* Loop until the worklist is empty. */
3578 while (sp)
3580 gimple *stmt;
3581 basic_block dom;
3583 /* Pick a block from the worklist. */
3584 block = worklist[--sp];
3586 /* Initially, the set of available values in BLOCK is that of
3587 its immediate dominator. */
3588 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3589 if (dom)
3591 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3592 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3595 /* Generate values for PHI nodes. */
3596 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3597 gsi_next (&gsi))
3599 tree result = gimple_phi_result (gsi.phi ());
3601 /* We have no need for virtual phis, as they don't represent
3602 actual computations. */
3603 if (virtual_operand_p (result))
3605 BB_LIVE_VOP_ON_EXIT (block) = result;
3606 continue;
3609 pre_expr e = get_or_alloc_expr_for_name (result);
3610 add_to_value (get_expr_value_id (e), e);
3611 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3612 bitmap_insert_into_set (PHI_GEN (block), e);
3615 BB_MAY_NOTRETURN (block) = 0;
3617 /* Now compute value numbers and populate value sets with all
3618 the expressions computed in BLOCK. */
3619 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3620 gsi_next (&gsi))
3622 ssa_op_iter iter;
3623 tree op;
3625 stmt = gsi_stmt (gsi);
3627 /* Cache whether the basic-block has any non-visible side-effect
3628 or control flow.
3629 If this isn't a call, or it is the last stmt in the
3630 basic-block, then the CFG represents things correctly. */
3631 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3633 /* Non-looping const functions always return normally.
3634 Otherwise the call might not return or might have side-effects
3635 that forbid hoisting possibly trapping expressions
3636 before it. */
3637 int flags = gimple_call_flags (stmt);
3638 if (!(flags & ECF_CONST)
3639 || (flags & ECF_LOOPING_CONST_OR_PURE))
3640 BB_MAY_NOTRETURN (block) = 1;
3643 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3645 pre_expr e = get_or_alloc_expr_for_name (op);
3647 add_to_value (get_expr_value_id (e), e);
3648 bitmap_insert_into_set (TMP_GEN (block), e);
3649 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3652 if (gimple_vdef (stmt))
3653 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3655 if (gimple_has_side_effects (stmt)
3656 || stmt_could_throw_p (stmt)
3657 || is_gimple_debug (stmt))
3658 continue;
3660 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3662 if (ssa_undefined_value_p (op))
3663 continue;
3664 pre_expr e = get_or_alloc_expr_for_name (op);
3665 bitmap_value_insert_into_set (EXP_GEN (block), e);
3668 switch (gimple_code (stmt))
3670 case GIMPLE_RETURN:
3671 continue;
3673 case GIMPLE_CALL:
3675 vn_reference_t ref;
3676 vn_reference_s ref1;
3677 pre_expr result = NULL;
3679 /* We can value number only calls to real functions. */
3680 if (gimple_call_internal_p (stmt))
3681 continue;
3683 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3684 if (!ref)
3685 continue;
3687 /* If the value of the call is not invalidated in
3688 this block until it is computed, add the expression
3689 to EXP_GEN. */
3690 if (!gimple_vuse (stmt)
3691 || gimple_code
3692 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3693 || gimple_bb (SSA_NAME_DEF_STMT
3694 (gimple_vuse (stmt))) != block)
3696 result = pre_expr_pool.allocate ();
3697 result->kind = REFERENCE;
3698 result->id = 0;
3699 PRE_EXPR_REFERENCE (result) = ref;
3701 get_or_alloc_expression_id (result);
3702 add_to_value (get_expr_value_id (result), result);
3703 bitmap_value_insert_into_set (EXP_GEN (block), result);
3705 continue;
3708 case GIMPLE_ASSIGN:
3710 pre_expr result = NULL;
3711 switch (vn_get_stmt_kind (stmt))
3713 case VN_NARY:
3715 enum tree_code code = gimple_assign_rhs_code (stmt);
3716 vn_nary_op_t nary;
3718 /* COND_EXPR and VEC_COND_EXPR are awkward in
3719 that they contain an embedded complex expression.
3720 Don't even try to shove those through PRE. */
3721 if (code == COND_EXPR
3722 || code == VEC_COND_EXPR)
3723 continue;
3725 vn_nary_op_lookup_stmt (stmt, &nary);
3726 if (!nary)
3727 continue;
3729 /* If the NARY traps and there was a preceding
3730 point in the block that might not return, avoid
3731 adding the nary to EXP_GEN. */
3732 if (BB_MAY_NOTRETURN (block)
3733 && vn_nary_may_trap (nary))
3734 continue;
3736 result = pre_expr_pool.allocate ();
3737 result->kind = NARY;
3738 result->id = 0;
3739 PRE_EXPR_NARY (result) = nary;
3740 break;
3743 case VN_REFERENCE:
3745 vn_reference_t ref;
3746 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3747 gimple_vuse (stmt),
3748 VN_WALK, &ref);
3749 if (!ref)
3750 continue;
3752 /* If the value of the reference is not invalidated in
3753 this block until it is computed, add the expression
3754 to EXP_GEN. */
3755 if (gimple_vuse (stmt))
3757 gimple *def_stmt;
3758 bool ok = true;
3759 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3760 while (!gimple_nop_p (def_stmt)
3761 && gimple_code (def_stmt) != GIMPLE_PHI
3762 && gimple_bb (def_stmt) == block)
3764 if (stmt_may_clobber_ref_p
3765 (def_stmt, gimple_assign_rhs1 (stmt)))
3767 ok = false;
3768 break;
3770 def_stmt
3771 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3773 if (!ok)
3774 continue;
3777 result = pre_expr_pool.allocate ();
3778 result->kind = REFERENCE;
3779 result->id = 0;
3780 PRE_EXPR_REFERENCE (result) = ref;
3781 break;
3784 default:
3785 continue;
3788 get_or_alloc_expression_id (result);
3789 add_to_value (get_expr_value_id (result), result);
3790 bitmap_value_insert_into_set (EXP_GEN (block), result);
3791 continue;
3793 default:
3794 break;
3798 if (dump_file && (dump_flags & TDF_DETAILS))
3800 print_bitmap_set (dump_file, EXP_GEN (block),
3801 "exp_gen", block->index);
3802 print_bitmap_set (dump_file, PHI_GEN (block),
3803 "phi_gen", block->index);
3804 print_bitmap_set (dump_file, TMP_GEN (block),
3805 "tmp_gen", block->index);
3806 print_bitmap_set (dump_file, AVAIL_OUT (block),
3807 "avail_out", block->index);
3810 /* Put the dominator children of BLOCK on the worklist of blocks
3811 to compute available sets for. */
3812 for (son = first_dom_son (CDI_DOMINATORS, block);
3813 son;
3814 son = next_dom_son (CDI_DOMINATORS, son))
3815 worklist[sp++] = son;
3818 free (worklist);
3822 /* Local state for the eliminate domwalk. */
3823 static vec<gimple *> el_to_remove;
3824 static vec<gimple *> el_to_fixup;
3825 static unsigned int el_todo;
3826 static vec<tree> el_avail;
3827 static vec<tree> el_avail_stack;
3829 /* Return a leader for OP that is available at the current point of the
3830 eliminate domwalk. */
3832 static tree
3833 eliminate_avail (tree op)
3835 tree valnum = VN_INFO (op)->valnum;
3836 if (TREE_CODE (valnum) == SSA_NAME)
3838 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3839 return valnum;
3840 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3841 return el_avail[SSA_NAME_VERSION (valnum)];
3843 else if (is_gimple_min_invariant (valnum))
3844 return valnum;
3845 return NULL_TREE;
3848 /* At the current point of the eliminate domwalk make OP available. */
3850 static void
3851 eliminate_push_avail (tree op)
3853 tree valnum = VN_INFO (op)->valnum;
3854 if (TREE_CODE (valnum) == SSA_NAME)
3856 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
3857 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
3858 tree pushop = op;
3859 if (el_avail[SSA_NAME_VERSION (valnum)])
3860 pushop = el_avail[SSA_NAME_VERSION (valnum)];
3861 el_avail_stack.safe_push (pushop);
3862 el_avail[SSA_NAME_VERSION (valnum)] = op;
3866 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3867 the leader for the expression if insertion was successful. */
3869 static tree
3870 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
3872 gimple *stmt = gimple_seq_first_stmt (VN_INFO (val)->expr);
3873 if (!is_gimple_assign (stmt)
3874 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
3875 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR))
3876 return NULL_TREE;
3878 tree op = gimple_assign_rhs1 (stmt);
3879 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3880 op = TREE_OPERAND (op, 0);
3881 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
3882 if (!leader)
3883 return NULL_TREE;
3885 gimple_seq stmts = NULL;
3886 tree res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
3887 TREE_TYPE (val), leader);
3888 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3889 VN_INFO_GET (res)->valnum = val;
3891 if (TREE_CODE (leader) == SSA_NAME)
3892 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
3894 pre_stats.insertions++;
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3897 fprintf (dump_file, "Inserted ");
3898 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0, 0);
3901 return res;
3904 class eliminate_dom_walker : public dom_walker
3906 public:
3907 eliminate_dom_walker (cdi_direction direction, bool do_pre_)
3908 : dom_walker (direction), do_pre (do_pre_) {}
3910 virtual edge before_dom_children (basic_block);
3911 virtual void after_dom_children (basic_block);
3913 bool do_pre;
3916 /* Perform elimination for the basic-block B during the domwalk. */
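/* For illustration: if SCCVN recorded that x_3 and y_5 share a value
   number and x_3 is the available leader when we reach

     y_5 = a_1 + b_2;

   then eliminate_avail (y_5) returns the leader x_3 and the statement
   can be turned into the copy y_5 = x_3, or y_5's uses can be
   propagated directly when that is safe.  */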
3918 edge
3919 eliminate_dom_walker::before_dom_children (basic_block b)
3921 /* Mark new bb. */
3922 el_avail_stack.safe_push (NULL_TREE);
3924 /* ??? If we do nothing for unreachable blocks then this will confuse
3925 tailmerging. Eventually we can reduce its reliance on SCCVN now
3926 that we fully copy/constant-propagate (most) things. */
3928 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
3930 gphi *phi = gsi.phi ();
3931 tree res = PHI_RESULT (phi);
3933 if (virtual_operand_p (res))
3935 gsi_next (&gsi);
3936 continue;
3939 tree sprime = eliminate_avail (res);
3940 if (sprime
3941 && sprime != res)
3943 if (dump_file && (dump_flags & TDF_DETAILS))
3945 fprintf (dump_file, "Replaced redundant PHI node defining ");
3946 print_generic_expr (dump_file, res, 0);
3947 fprintf (dump_file, " with ");
3948 print_generic_expr (dump_file, sprime, 0);
3949 fprintf (dump_file, "\n");
3952 /* If we inserted this PHI node ourself, it's not an elimination. */
3953 if (inserted_exprs
3954 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
3955 pre_stats.phis--;
3956 else
3957 pre_stats.eliminations++;
3959 /* If we will propagate into all uses don't bother to do
3960 anything. */
3961 if (may_propagate_copy (res, sprime))
3963 /* Mark the PHI for removal. */
3964 el_to_remove.safe_push (phi);
3965 gsi_next (&gsi);
3966 continue;
3969 remove_phi_node (&gsi, false);
3971 if (inserted_exprs
3972 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
3973 && TREE_CODE (sprime) == SSA_NAME)
3974 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
3976 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
3977 sprime = fold_convert (TREE_TYPE (res), sprime);
3978 gimple *stmt = gimple_build_assign (res, sprime);
3979 /* ??? It cannot yet be necessary (DOM walk). */
3980 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
3982 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
3983 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
3984 continue;
3987 eliminate_push_avail (res);
3988 gsi_next (&gsi);

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple *stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
          && !gimple_has_volatile_ops (stmt)
          /* See PR43491.  Do not replace a global register variable when
             it is the RHS of an assignment.  Do replace local register
             variables since gcc does not guarantee a local variable will
             be allocated in a register.
             ???  The fix isn't effective here.  This should instead
             be ensured by not value-numbering them the same but treating
             them like volatiles?  */
          && !(gimple_assign_single_p (stmt)
               && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
                   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
                   && is_global_var (gimple_assign_rhs1 (stmt)))))
        {
          sprime = eliminate_avail (lhs);
          if (!sprime)
            {
              /* If there is no existing usable leader but SCCVN thinks
                 it has an expression it wants to use as replacement,
                 insert that.  */
              tree val = VN_INFO (lhs)->valnum;
              if (val != VN_TOP
                  && TREE_CODE (val) == SSA_NAME
                  && VN_INFO (val)->needs_insertion
                  && VN_INFO (val)->expr != NULL
                  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
                eliminate_push_avail (sprime);
            }

          /* If this now constitutes a copy, duplicate points-to
             and range info appropriately.  This is especially
             important for inserted code.  See tree-ssa-copy.c
             for similar code.  */
          if (sprime
              && TREE_CODE (sprime) == SSA_NAME)
            {
              basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
              if (POINTER_TYPE_P (TREE_TYPE (lhs))
                  && SSA_NAME_PTR_INFO (lhs)
                  && !SSA_NAME_PTR_INFO (sprime))
                {
                  duplicate_ssa_name_ptr_info (sprime,
                                               SSA_NAME_PTR_INFO (lhs));
                  if (b != sprime_b)
                    mark_ptr_info_alignment_unknown
                      (SSA_NAME_PTR_INFO (sprime));
                }
              else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
                       && SSA_NAME_RANGE_INFO (lhs)
                       && !SSA_NAME_RANGE_INFO (sprime)
                       && b == sprime_b)
                duplicate_ssa_name_range_info (sprime,
                                               SSA_NAME_RANGE_TYPE (lhs),
                                               SSA_NAME_RANGE_INFO (lhs));
            }

          /* Inhibit the use of an inserted PHI on a loop header when
             the address of the memory reference is a simple induction
             variable.  In other cases the vectorizer won't do anything
             anyway (either it's loop invariant or a complicated
             expression).  */
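          /* Illustrative: for a load like a[i] in an inner loop whose
             address is driven by the induction variable i, reusing a
             PRE-inserted PHI for the loaded value would carry the previous
             iteration's value around the backedge, a loop-carried
             dependence that would keep the vectorizer from vectorizing
             the loop, so the replacement is suppressed.  */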
          if (sprime
              && TREE_CODE (sprime) == SSA_NAME
              && do_pre
              && flag_tree_loop_vectorize
              && loop_outer (b->loop_father)
              && has_zero_uses (sprime)
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
              && gimple_assign_load_p (stmt))
            {
              gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
              basic_block def_bb = gimple_bb (def_stmt);
              if (gimple_code (def_stmt) == GIMPLE_PHI
                  && def_bb->loop_father->header == def_bb)
                {
                  loop_p loop = def_bb->loop_father;
                  ssa_op_iter iter;
                  tree op;
                  bool found = false;
                  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                    {
                      affine_iv iv;
                      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                      if (def_bb
                          && flow_bb_inside_loop_p (loop, def_bb)
                          && simple_iv (loop, loop, op, &iv, true))
                        {
                          found = true;
                          break;
                        }
                    }
                  if (found)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fprintf (dump_file, "Not replacing ");
                          print_gimple_expr (dump_file, stmt, 0, 0);
                          fprintf (dump_file, " with ");
                          print_generic_expr (dump_file, sprime, 0);
                          fprintf (dump_file, " which would add a loop"
                                   " carried dependence to loop %d\n",
                                   loop->num);
                        }
                      /* Don't keep sprime available.  */
                      sprime = NULL_TREE;
                    }
                }
            }

          if (sprime)
            {
              /* If we can propagate the value computed for LHS into
                 all uses don't bother doing anything with this stmt.  */
              if (may_propagate_copy (lhs, sprime))
                {
                  /* Mark it for removal.  */
                  el_to_remove.safe_push (stmt);

                  /* ???  Don't count copy/constant propagations.  */
                  if (gimple_assign_single_p (stmt)
                      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                          || gimple_assign_rhs1 (stmt) == sprime))
                    continue;

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in all uses of ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  pre_stats.eliminations++;
                  continue;
                }

              /* If this is an assignment from our leader (which
                 happens in the case the value-number is a constant)
                 then there is nothing to do.  */
              if (gimple_assign_single_p (stmt)
                  && sprime == gimple_assign_rhs1 (stmt))
                continue;

              /* Else replace its RHS.  */
              bool can_make_abnormal_goto
                = is_gimple_call (stmt)
                  && stmt_can_make_abnormal_goto (stmt);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime, 0);
                  fprintf (dump_file, " in ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              if (TREE_CODE (sprime) == SSA_NAME)
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                NECESSARY, true);

              pre_stats.eliminations++;
              gimple *orig_stmt = stmt;
              if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                              TREE_TYPE (sprime)))
                sprime = fold_convert (TREE_TYPE (lhs), sprime);
              tree vdef = gimple_vdef (stmt);
              tree vuse = gimple_vuse (stmt);
              propagate_tree_value_into_stmt (&gsi, sprime);
              stmt = gsi_stmt (gsi);
              update_stmt (stmt);
              if (vdef != gimple_vdef (stmt))
                VN_INFO (vdef)->valnum = vuse;

              /* If we removed EH side-effects from the statement, clean
                 its EH information.  */
              if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                {
                  bitmap_set_bit (need_eh_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed EH side-effects.\n");
                }

              /* Likewise for AB side-effects.  */
              if (can_make_abnormal_goto
                  && !stmt_can_make_abnormal_goto (stmt))
                {
                  bitmap_set_bit (need_ab_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed AB side-effects.\n");
                }

              continue;
            }
        }

      /* If the statement is a scalar store, see if the expression
         has the same value number as its RHS.  If so, the store is
         dead.  */
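      /* For example (illustrative): in
           *p_1 = x_2;  ...  *p_1 = x_2;
         the second store writes the value the memory location is already
         known to hold, so it can be queued for removal.  */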
      if (gimple_assign_single_p (stmt)
          && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg (gimple_assign_lhs (stmt))
          && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
        {
          tree val;
          tree rhs = gimple_assign_rhs1 (stmt);
          val = vn_reference_lookup (gimple_assign_lhs (stmt),
                                     gimple_vuse (stmt), VN_WALK, NULL);
          if (TREE_CODE (rhs) == SSA_NAME)
            rhs = VN_INFO (rhs)->valnum;
          if (val
              && operand_equal_p (val, rhs, 0))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              /* Queue stmt for removal.  */
              el_to_remove.safe_push (stmt);
              continue;
            }
        }

      /* If this is a control statement for which value numbering left
         one of the outgoing edges unexecutable, force the condition to a
         constant consistent with that.  */
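      /* For instance (illustrative): if value numbering proved that only
         the true edge out of if (a_1 != 0) can ever be taken, the
         condition is folded to a constant-true condition and
         TODO_cleanup_cfg is queued so the dead edge gets removed.  */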
      if (gcond *cond = dyn_cast <gcond *> (stmt))
        {
          if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
              ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Removing unexecutable edge from ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }
              if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
                  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
                gimple_cond_make_true (cond);
              else
                gimple_cond_make_false (cond);
              update_stmt (cond);
              el_todo |= TODO_cleanup_cfg;
              continue;
            }
        }

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
         into all uses), replace all uses on this stmt with their
         leaders.  */
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          /* ???  The call code above leaves stmt operands un-updated.  */
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          tree sprime = eliminate_avail (use);
          if (sprime && sprime != use
              && may_propagate_copy (use, sprime)
              /* We substitute into debug stmts to avoid excessive
                 debug temporaries created by removed stmts, but we need
                 to avoid doing so for inserted sprimes as we never want
                 to create debug temporaries for them.  */
              && (!inserted_exprs
                  || TREE_CODE (sprime) != SSA_NAME
                  || !is_gimple_debug (stmt)
                  || !bitmap_bit_p (inserted_exprs,
                                    SSA_NAME_VERSION (sprime))))
            {
              propagate_value (use_p, sprime);
              gimple_set_modified (stmt, true);
              if (TREE_CODE (sprime) == SSA_NAME
                  && !is_gimple_debug (stmt))
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                NECESSARY, true);
            }
        }

      /* Visit indirect calls and turn them into direct calls if
         possible using the devirtualization machinery.  */
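      /* Illustrative C++ source-level view: a virtual call p->foo () is
         turned into a direct call D::foo () when the polymorphic-call
         analysis proves D is the only possible dynamic type of *p; when
         the target set is provably empty the call is replaced by a call
         to __builtin_unreachable.  */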
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
        {
          tree fn = gimple_call_fn (call_stmt);
          if (fn
              && flag_devirtualize
              && virtual_method_call_p (fn))
            {
              tree otr_type = obj_type_ref_class (fn);
              tree instance;
              ipa_polymorphic_call_context context (current_function_decl,
                                                    fn, stmt, &instance);
              bool final;

              context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                        otr_type, stmt);

              vec <cgraph_node *> targets
                = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                     tree_to_uhwi
                                                       (OBJ_TYPE_REF_TOKEN (fn)),
                                                     context,
                                                     &final);
              if (dump_file)
                dump_possible_polymorphic_call_targets (dump_file,
                                                        obj_type_ref_class (fn),
                                                        tree_to_uhwi
                                                          (OBJ_TYPE_REF_TOKEN (fn)),
                                                        context);
              if (final && targets.length () <= 1 && dbg_cnt (devirt))
                {
                  tree fn;
                  if (targets.length () == 1)
                    fn = targets[0]->decl;
                  else
                    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  if (dump_enabled_p ())
                    {
                      location_t loc = gimple_location_safe (stmt);
                      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                       "converting indirect call to "
                                       "function %s\n",
                                       lang_hooks.decl_printable_name (fn, 2));
                    }
                  gimple_call_set_fndecl (call_stmt, fn);
                  maybe_remove_unused_call_args (cfun, call_stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }

      if (gimple_modified_p (stmt))
        {
          /* If a formerly non-invariant ADDR_EXPR is turned into an
             invariant one it was on a separate stmt.  */
          if (gimple_assign_single_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
          gimple *old_stmt = stmt;
          if (is_gimple_call (stmt))
            {
              /* ???  Only fold calls inplace for now, this may create new
                 SSA names which in turn will confuse free_scc_vn SSA name
                 release code.  */
              fold_stmt_inplace (&gsi);
              /* When changing a call into a noreturn call, cfg cleanup
                 is needed to fix up the noreturn call.  */
              if (!was_noreturn && gimple_call_noreturn_p (stmt))
                el_to_fixup.safe_push (stmt);
            }
          else
            {
              fold_stmt (&gsi);
              stmt = gsi_stmt (gsi);
              if ((gimple_code (stmt) == GIMPLE_COND
                   && (gimple_cond_true_p (as_a <gcond *> (stmt))
                       || gimple_cond_false_p (as_a <gcond *> (stmt))))
                  || (gimple_code (stmt) == GIMPLE_SWITCH
                      && TREE_CODE (gimple_switch_index
                                      (as_a <gswitch *> (stmt)))
                         == INTEGER_CST))
                el_todo |= TODO_cleanup_cfg;
            }

          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }

          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }

          update_stmt (stmt);
          if (vdef != gimple_vdef (stmt))
            VN_INFO (vdef)->valnum = vuse;
        }

      /* Make new values available - for fully redundant LHS we
         continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
        eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (arg);
          if (sprime && may_propagate_copy (arg, sprime))
            {
              propagate_value (use_p, sprime);
              if (TREE_CODE (sprime) == SSA_NAME)
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
            }
        }
    }

  return NULL;
}

/* Make leaders that went out of scope no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}

/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_fixup.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
                        do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
        lhs = gimple_phi_result (stmt);
      else
        lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
          && TREE_CODE (lhs) == SSA_NAME)
        bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (need_ab_cleanup, bb->index);
          release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el_to_fixup.is_empty ())
    {
      stmt = el_to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }
  el_to_fixup.release ();

  return el_todo;
}

/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}

/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple *
mark_operand_necessary (tree op)
{
  gimple *stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}

/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
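
/* The pass below is a textbook mark-and-sweep over the inserted_exprs
   bitmap: seed the worklist with inserted defs already marked NECESSARY,
   transitively mark everything feeding them, then delete any insertion
   left unmarked.  For example (illustrative), if PRE inserted
     pretmp_5 = a_1 + b_2;
   but every use of pretmp_5 was later value-numbered away, the definition
   of pretmp_5 stays unmarked and is removed here.  */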

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple *t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple *n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple *n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }

  BITMAP_FREE (worklist);
}

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (though it should not) end
     up not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}