gcc/tree-ssa-pre.c
1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
5 <stevenb@suse.de>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3, or (at your option)
12 any later version.
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "obstack.h"
42 #include "tree-pass.h"
43 #include "flags.h"
44 #include "bitmap.h"
45 #include "langhooks.h"
46 #include "cfgloop.h"
47 #include "tree-ssa-sccvn.h"
48 #include "tree-scalar-evolution.h"
49 #include "params.h"
50 #include "dbgcnt.h"
52 /* TODO:
54 1. Avail sets can be shared by making an avail_find_leader that
55 walks up the dominator tree and looks in those avail sets.
56 This might affect code optimality; it's unclear right now.
57 2. Strength reduction can be performed by anticipating expressions
58 we can repair later on.
59 3. We can do back-substitution or smarter value numbering to catch
60 commutative expressions split up over multiple statements.
63 /* For ease of terminology, "expression node" in the below refers to
64 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
65 represent the actual statement containing the expressions we care about,
66 and we cache the value number by putting it in the expression. */
68 /* Basic algorithm
70 First we walk the statements to generate the AVAIL sets, the
71 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
72 generation of values/expressions by a given block. We use them
73 when computing the ANTIC sets. The AVAIL sets consist of
74 SSA_NAME's that represent values, so we know what values are
75 available in what blocks. AVAIL is a forward dataflow problem. In
76 SSA, values are never killed, so we don't need a kill set, or a
77 fixpoint iteration, in order to calculate the AVAIL sets. In
78 traditional parlance, AVAIL sets tell us the downsafety of the
79 expressions/values.
81 Next, we generate the ANTIC sets. These sets represent the
82 anticipatable expressions. ANTIC is a backwards dataflow
83 problem. An expression is anticipatable in a given block if it could
84 be generated in that block. This means that if we had to insert
85 the value of that expression in that block, we could.
86 Calculating the ANTIC sets requires phi translation of
87 expressions, because the flow goes backwards through phis. We must
88 iterate to a fixpoint of the ANTIC sets, because we have a kill
89 set. Even in SSA form, values are not live over the entire
90 function, only from their definition point onwards. So we have to
91 remove values from the ANTIC set once we go past the definition
92 point of the leaders that make them up.
93 compute_antic/compute_antic_aux performs this computation.
95 Third, we perform insertions to make partially redundant
96 expressions fully redundant.
98 An expression is partially redundant (excluding partial
99 anticipation) if:
101 1. It is AVAIL in some, but not all, of the predecessors of a
102 given block.
103 2. It is ANTIC in all the predecessors.
105 In order to make it fully redundant, we insert the expression into
106 the predecessors where it is not available, but is ANTIC.
108 For the partial anticipation case, we only perform insertion if it
109 is partially anticipated in some block, and fully available in all
110 of the predecessors.
112 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
113 performs these steps.
115 Fourth, we eliminate fully redundant expressions.
116 This is a simple statement walk that replaces redundant
117 calculations with the now available values. */
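/* Illustrative sketch (not part of this file): on a diamond CFG the
   phases above cooperate as follows; all names are invented.

       if (c_1)                        if (c_1)
         x_2 = a_3 + b_4;                x_2 = a_3 + b_4;
       else                            else
         ;                               t_5 = a_3 + b_4;    <- insertion
       y_6 = a_3 + b_4;                t_7 = PHI <x_2, t_5>  <- new PHI
                                       y_6 = t_7;            <- elimination

   The value of a_3 + b_4 is AVAIL out of only one predecessor of the
   join block but ANTIC in both, so inserting it into the other
   predecessor makes the final computation fully redundant.  */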
119 /* Representations of value numbers:
121 Value numbers are represented by a representative SSA_NAME. We
122 will create fake SSA_NAME's in situations where we need a
123 representative but do not have one (because it is a complex
124 expression). In order to facilitate storing the value numbers in
125 bitmaps, and keep the number of wasted SSA_NAME's down, we also
126 associate a value_id with each value number, and create full blown
127 ssa_name's only where we actually need them (IE in operands of
128 existing expressions).
130 Theoretically you could replace all the value_id's with
131 SSA_NAME_VERSION, but this would allocate a large number of
132 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
133 It would also require an additional indirection at each point we
134 use the value id. */
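/* A hypothetical instance of the scheme above: for

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   x_1, y_4 and the expression a_2 + b_3 all share one value number,
   whose representative is the SSA_NAME x_1, and one small value_id
   (say 7); the value_id is what actually ends up in the bitmap sets.  */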
136 /* Representation of expressions on value numbers:
138 Expressions consisting of value numbers are represented the same
139 way as our VN internally represents them, with an additional
140 "pre_expr" wrapping around them in order to facilitate storing all
141 of the expressions in the same sets. */
143 /* Representation of sets:
145 The dataflow sets do not need to be sorted in any particular order
146 for the majority of their lifetime, and are simply represented as two
147 bitmaps, one that keeps track of values present in the set, and one
148 that keeps track of expressions present in the set.
150 When we need them in topological order, we produce it on demand by
151 transforming the bitmap into an array and sorting it into topo
152 order. */
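/* Sketch with made-up ids: a set holding x_1 and a_2 + b_3, both of
   hypothetical value_id 7, is stored as

     expressions bitmap: { id(x_1), id(a_2 + b_3) }
     values bitmap:      { 7 }

   so several expression bits can map to a single value bit.  Sorting by
   value_id yields the topological order, since operands receive their
   value_ids before the expressions that use them.  */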
154 /* Type of expression, used to know which member of the PRE_EXPR union
155 is valid. */
157 enum pre_expr_kind
159 NAME,
160 NARY,
161 REFERENCE,
162 CONSTANT
165 typedef union pre_expr_union_d
167 tree name;
168 tree constant;
169 vn_nary_op_t nary;
170 vn_reference_t reference;
171 } pre_expr_union;
173 typedef struct pre_expr_d
175 enum pre_expr_kind kind;
176 unsigned int id;
177 pre_expr_union u;
178 } *pre_expr;
180 #define PRE_EXPR_NAME(e) (e)->u.name
181 #define PRE_EXPR_NARY(e) (e)->u.nary
182 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
183 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
185 static int
186 pre_expr_eq (const void *p1, const void *p2)
188 const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
189 const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;
191 if (e1->kind != e2->kind)
192 return false;
194 switch (e1->kind)
196 case CONSTANT:
197 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
198 PRE_EXPR_CONSTANT (e2));
199 case NAME:
200 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
201 case NARY:
202 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
203 case REFERENCE:
204 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
205 PRE_EXPR_REFERENCE (e2));
206 default:
207 gcc_unreachable ();
211 static hashval_t
212 pre_expr_hash (const void *p1)
214 const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
215 switch (e->kind)
217 case CONSTANT:
218 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
219 case NAME:
220 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
221 case NARY:
222 return PRE_EXPR_NARY (e)->hashcode;
223 case REFERENCE:
224 return PRE_EXPR_REFERENCE (e)->hashcode;
225 default:
226 gcc_unreachable ();
231 /* Next global expression id number. */
232 static unsigned int next_expression_id;
234 /* Mapping from expression to id number we can use in bitmap sets. */
235 DEF_VEC_P (pre_expr);
236 DEF_VEC_ALLOC_P (pre_expr, heap);
237 static VEC(pre_expr, heap) *expressions;
238 static htab_t expression_to_id;
239 static VEC(unsigned, heap) *name_to_id;
241 /* Allocate an expression id for EXPR. */
243 static inline unsigned int
244 alloc_expression_id (pre_expr expr)
246 void **slot;
247 /* Make sure we won't overflow. */
248 gcc_assert (next_expression_id + 1 > next_expression_id);
249 expr->id = next_expression_id++;
250 VEC_safe_push (pre_expr, heap, expressions, expr);
251 if (expr->kind == NAME)
253 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
254 /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
255 re-allocations by using VEC_reserve upfront. There is no
256 VEC_quick_grow_cleared unfortunately. */
257 VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
258 VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
259 gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
260 VEC_replace (unsigned, name_to_id, version, expr->id);
262 else
264 slot = htab_find_slot (expression_to_id, expr, INSERT);
265 gcc_assert (!*slot);
266 *slot = expr;
268 return next_expression_id - 1;
271 /* Return the expression id for tree EXPR. */
273 static inline unsigned int
274 get_expression_id (const pre_expr expr)
276 return expr->id;
279 static inline unsigned int
280 lookup_expression_id (const pre_expr expr)
282 void **slot;
284 if (expr->kind == NAME)
286 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
287 if (VEC_length (unsigned, name_to_id) <= version)
288 return 0;
289 return VEC_index (unsigned, name_to_id, version);
291 else
293 slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
294 if (!slot)
295 return 0;
296 return ((pre_expr)*slot)->id;
300 /* Return the existing expression id for EXPR, or create one if one
301 does not exist yet. */
303 static inline unsigned int
304 get_or_alloc_expression_id (pre_expr expr)
306 unsigned int id = lookup_expression_id (expr);
307 if (id == 0)
308 return alloc_expression_id (expr);
309 return expr->id = id;
312 /* Return the expression that has expression id ID */
314 static inline pre_expr
315 expression_for_id (unsigned int id)
317 return VEC_index (pre_expr, expressions, id);
320 /* Free the expression id field in all of our expressions,
321 and then destroy the expressions array. */
323 static void
324 clear_expression_ids (void)
326 VEC_free (pre_expr, heap, expressions);
329 static alloc_pool pre_expr_pool;
331 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
333 static pre_expr
334 get_or_alloc_expr_for_name (tree name)
336 struct pre_expr_d expr;
337 pre_expr result;
338 unsigned int result_id;
340 expr.kind = NAME;
341 expr.id = 0;
342 PRE_EXPR_NAME (&expr) = name;
343 result_id = lookup_expression_id (&expr);
344 if (result_id != 0)
345 return expression_for_id (result_id);
347 result = (pre_expr) pool_alloc (pre_expr_pool);
348 result->kind = NAME;
349 PRE_EXPR_NAME (result) = name;
350 alloc_expression_id (result);
351 return result;
354 static bool in_fre = false;
356 /* An unordered bitmap set. One bitmap tracks values, the other,
357 expressions. */
358 typedef struct bitmap_set
360 bitmap expressions;
361 bitmap values;
362 } *bitmap_set_t;
364 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
365 EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))
367 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
368 EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
370 /* Mapping from value id to expressions with that value_id. */
371 DEF_VEC_P (bitmap_set_t);
372 DEF_VEC_ALLOC_P (bitmap_set_t, heap);
373 static VEC(bitmap_set_t, heap) *value_expressions;
375 /* Sets that we need to keep track of. */
376 typedef struct bb_bitmap_sets
378 /* The EXP_GEN set, which represents expressions/values generated in
379 a basic block. */
380 bitmap_set_t exp_gen;
382 /* The PHI_GEN set, which represents PHI results generated in a
383 basic block. */
384 bitmap_set_t phi_gen;
386 /* The TMP_GEN set, which represents results/temporaries generated
387 in a basic block. IE the LHS of an expression. */
388 bitmap_set_t tmp_gen;
390 /* The AVAIL_OUT set, which represents which values are available in
391 a given basic block. */
392 bitmap_set_t avail_out;
394 /* The ANTIC_IN set, which represents which values are anticipatable
395 in a given basic block. */
396 bitmap_set_t antic_in;
398 /* The PA_IN set, which represents which values are
399 partially anticipatable in a given basic block. */
400 bitmap_set_t pa_in;
402 /* The NEW_SETS set, which is used during insertion to augment the
403 AVAIL_OUT set of blocks with the new insertions performed during
404 the current iteration. */
405 bitmap_set_t new_sets;
407 /* A cache for value_dies_in_block_x. */
408 bitmap expr_dies;
410 /* True if we have visited this block during ANTIC calculation. */
411 unsigned int visited : 1;
413 /* True if we have deferred processing this block during ANTIC
414 calculation until its successor is processed. */
415 unsigned int deferred : 1;
417 /* True when the block contains a call that might not return. */
418 unsigned int contains_may_not_return_call : 1;
419 } *bb_value_sets_t;
421 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
422 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
423 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
424 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
425 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
426 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
427 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
428 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
429 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
430 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
431 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
434 /* Basic block list in postorder. */
435 static int *postorder;
437 /* This structure is used to keep track of statistics on what
438 optimization PRE was able to perform. */
439 static struct
441 /* The number of RHS computations eliminated by PRE. */
442 int eliminations;
444 /* The number of new expressions/temporaries generated by PRE. */
445 int insertions;
447 /* The number of inserts found due to partial anticipation */
448 int pa_insert;
450 /* The number of new PHI nodes added by PRE. */
451 int phis;
453 /* The number of values found constant. */
454 int constified;
456 } pre_stats;
458 static bool do_partial_partial;
459 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
460 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
461 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
462 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
463 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
464 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
465 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
466 unsigned int, bool);
467 static bitmap_set_t bitmap_set_new (void);
468 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
469 gimple, tree);
470 static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
471 gimple);
472 static unsigned int get_expr_value_id (pre_expr);
474 /* We can add and remove elements and entries to and from sets
475 and hash tables, so we use alloc pools for them. */
477 static alloc_pool bitmap_set_pool;
478 static bitmap_obstack grand_bitmap_obstack;
480 /* To avoid adding 300 temporary variables when we only need one, we
481 only create one temporary variable, on demand, and build ssa names
482 off that. We do have to change the variable if the types don't
483 match the current variable's type. */
484 static tree pretemp;
485 static tree storetemp;
486 static tree prephitemp;
488 /* Set of blocks with statements that have had their EH information
489 cleaned up. */
490 static bitmap need_eh_cleanup;
492 /* The phi_translate_table caches phi translations for a given
493 expression and predecessor. */
495 static htab_t phi_translate_table;
497 /* A three tuple {e, pred, v} used to cache phi translations in the
498 phi_translate_table. */
500 typedef struct expr_pred_trans_d
502 /* The expression. */
503 pre_expr e;
505 /* The predecessor block along which we translated the expression. */
506 basic_block pred;
508 /* The value that resulted from the translation. */
509 pre_expr v;
511 /* The hashcode for the expression, pred pair. This is cached for
512 speed reasons. */
513 hashval_t hashcode;
514 } *expr_pred_trans_t;
515 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
517 /* Return the hash value for a phi translation table entry. */
519 static hashval_t
520 expr_pred_trans_hash (const void *p)
522 const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
523 return ve->hashcode;
526 /* Return true if two phi translation table entries are the same.
527 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/
529 static int
530 expr_pred_trans_eq (const void *p1, const void *p2)
532 const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
533 const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
534 basic_block b1 = ve1->pred;
535 basic_block b2 = ve2->pred;
537 /* If they are not translations for the same basic block, they can't
538 be equal. */
539 if (b1 != b2)
540 return false;
541 return pre_expr_eq (ve1->e, ve2->e);
544 /* Search in the phi translation table for the translation of
545 expression E in basic block PRED.
546 Return the translated value, if found, NULL otherwise. */
548 static inline pre_expr
549 phi_trans_lookup (pre_expr e, basic_block pred)
551 void **slot;
552 struct expr_pred_trans_d ept;
554 ept.e = e;
555 ept.pred = pred;
556 ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
557 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
558 NO_INSERT);
559 if (!slot)
560 return NULL;
561 else
562 return ((expr_pred_trans_t) *slot)->v;
566 /* Add the tuple mapping from {expression E, basic block PRED} to
567 value V, to the phi translation table. */
569 static inline void
570 phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
572 void **slot;
573 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
574 new_pair->e = e;
575 new_pair->pred = pred;
576 new_pair->v = v;
577 new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
578 pred->index);
580 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
581 new_pair->hashcode, INSERT);
582 if (*slot)
583 free (*slot);
584 *slot = (void *) new_pair;
588 /* Add expression E to the expression set of value id V. */
590 void
591 add_to_value (unsigned int v, pre_expr e)
593 bitmap_set_t set;
595 gcc_assert (get_expr_value_id (e) == v);
597 if (v >= VEC_length (bitmap_set_t, value_expressions))
599 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
600 v + 1);
603 set = VEC_index (bitmap_set_t, value_expressions, v);
604 if (!set)
606 set = bitmap_set_new ();
607 VEC_replace (bitmap_set_t, value_expressions, v, set);
610 bitmap_insert_into_set_1 (set, e, v, true);
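/* Hypothetical illustration: if x_1, y_4 and a_2 + b_3 all have
   value_id 7, then value_expressions[7] is a bitmap set holding the
   expression ids of those three expressions.  bitmap_find_leader and
   sorted_array_from_bitmap_set walk this reverse mapping rather than
   scanning every expression in a set.  */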
613 /* Create a new bitmap set and return it. */
615 static bitmap_set_t
616 bitmap_set_new (void)
618 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
619 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
620 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
621 return ret;
624 /* Return the value id for a PRE expression EXPR. */
626 static unsigned int
627 get_expr_value_id (pre_expr expr)
629 switch (expr->kind)
631 case CONSTANT:
633 unsigned int id;
634 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
635 if (id == 0)
637 id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
638 add_to_value (id, expr);
640 return id;
642 case NAME:
643 return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
644 case NARY:
645 return PRE_EXPR_NARY (expr)->value_id;
646 case REFERENCE:
647 return PRE_EXPR_REFERENCE (expr)->value_id;
648 default:
649 gcc_unreachable ();
653 /* Remove an expression EXPR from a bitmapped set. */
655 static void
656 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
658 unsigned int val = get_expr_value_id (expr);
659 if (!value_id_constant_p (val))
661 bitmap_clear_bit (set->values, val);
662 bitmap_clear_bit (set->expressions, get_expression_id (expr));
666 static void
667 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
668 unsigned int val, bool allow_constants)
670 if (allow_constants || !value_id_constant_p (val))
672 /* We specifically expect this and only this function to be able to
673 insert constants into a set. */
674 bitmap_set_bit (set->values, val);
675 bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
679 /* Insert an expression EXPR into a bitmapped set. */
681 static void
682 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
684 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
687 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
689 static void
690 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
692 bitmap_copy (dest->expressions, orig->expressions);
693 bitmap_copy (dest->values, orig->values);
697 /* Free memory used up by SET. */
698 static void
699 bitmap_set_free (bitmap_set_t set)
701 BITMAP_FREE (set->expressions);
702 BITMAP_FREE (set->values);
706 /* Generate a topologically ordered array of bitmap set SET. */
708 static VEC(pre_expr, heap) *
709 sorted_array_from_bitmap_set (bitmap_set_t set)
711 unsigned int i, j;
712 bitmap_iterator bi, bj;
713 VEC(pre_expr, heap) *result;
715 /* Pre-allocate roughly enough space for the array. */
716 result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
718 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
720 /* The number of expressions having a given value is usually
721 relatively small. Thus, rather than making a vector of all
722 the expressions and sorting it by value-id, we walk the values
723 and check in the reverse mapping that tells us what expressions
724 have a given value, to filter those in our set. As a result,
725 the expressions are inserted in value-id order, which means
726 topological order.
728 If this is somehow a significant loss for some cases, we can
729 choose which set to walk based on the set size. */
730 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
731 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
733 if (bitmap_bit_p (set->expressions, j))
734 VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
738 return result;
741 /* Perform bitmapped set operation DEST &= ORIG. */
743 static void
744 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
746 bitmap_iterator bi;
747 unsigned int i;
749 if (dest != orig)
751 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
753 bitmap_and_into (dest->values, orig->values);
754 bitmap_copy (temp, dest->expressions);
755 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
757 pre_expr expr = expression_for_id (i);
758 unsigned int value_id = get_expr_value_id (expr);
759 if (!bitmap_bit_p (dest->values, value_id))
760 bitmap_clear_bit (dest->expressions, i);
762 BITMAP_FREE (temp);
766 /* Subtract all values and expressions contained in ORIG from DEST. */
768 static bitmap_set_t
769 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
771 bitmap_set_t result = bitmap_set_new ();
772 bitmap_iterator bi;
773 unsigned int i;
775 bitmap_and_compl (result->expressions, dest->expressions,
776 orig->expressions);
778 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
780 pre_expr expr = expression_for_id (i);
781 unsigned int value_id = get_expr_value_id (expr);
782 bitmap_set_bit (result->values, value_id);
785 return result;
788 /* Subtract all the values in bitmap set B from bitmap set A. */
790 static void
791 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
793 unsigned int i;
794 bitmap_iterator bi;
795 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
797 bitmap_copy (temp, a->expressions);
798 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
800 pre_expr expr = expression_for_id (i);
801 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
802 bitmap_remove_from_set (a, expr);
804 BITMAP_FREE (temp);
808 /* Return true if bitmapped set SET contains the value VALUE_ID. */
810 static bool
811 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
813 if (value_id_constant_p (value_id))
814 return true;
816 if (!set || bitmap_empty_p (set->expressions))
817 return false;
819 return bitmap_bit_p (set->values, value_id);
822 static inline bool
823 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
825 return bitmap_bit_p (set->expressions, get_expression_id (expr));
828 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
830 static void
831 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
832 const pre_expr expr)
834 bitmap_set_t exprset;
835 unsigned int i;
836 bitmap_iterator bi;
838 if (value_id_constant_p (lookfor))
839 return;
841 if (!bitmap_set_contains_value (set, lookfor))
842 return;
844 /* The number of expressions having a given value is usually
845 significantly less than the total number of expressions in SET.
846 Thus, rather than check, for each expression in SET, whether it
847 has the value LOOKFOR, we walk the reverse mapping that tells us
848 what expressions have a given value, and see if any of those
849 expressions are in our set. For large testcases, this is about
850 5-10x faster than walking the bitmap. If this is somehow a
851 significant loss for some cases, we can choose which set to walk
852 based on the set size. */
853 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
854 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
856 if (bitmap_bit_p (set->expressions, i))
858 bitmap_clear_bit (set->expressions, i);
859 bitmap_set_bit (set->expressions, get_expression_id (expr));
860 return;
865 /* Return true if two bitmap sets are equal. */
867 static bool
868 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
870 return bitmap_equal_p (a->values, b->values);
873 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
874 and add it otherwise. */
876 static void
877 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
879 unsigned int val = get_expr_value_id (expr);
881 if (bitmap_set_contains_value (set, val))
882 bitmap_set_replace_value (set, val, expr);
883 else
884 bitmap_insert_into_set (set, expr);
887 /* Insert EXPR into SET if EXPR's value is not already present in
888 SET. */
890 static void
891 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
893 unsigned int val = get_expr_value_id (expr);
895 #ifdef ENABLE_CHECKING
896 gcc_assert (expr->id == get_or_alloc_expression_id (expr));
897 #endif
899 /* Constant values are always considered to be part of the set. */
900 if (value_id_constant_p (val))
901 return;
903 /* If the value membership changed, add the expression. */
904 if (bitmap_set_bit (set->values, val))
905 bitmap_set_bit (set->expressions, expr->id);
908 /* Print out EXPR to outfile. */
910 static void
911 print_pre_expr (FILE *outfile, const pre_expr expr)
913 switch (expr->kind)
915 case CONSTANT:
916 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
917 break;
918 case NAME:
919 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
920 break;
921 case NARY:
923 unsigned int i;
924 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
925 fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
926 for (i = 0; i < nary->length; i++)
928 print_generic_expr (outfile, nary->op[i], 0);
929 if (i != (unsigned) nary->length - 1)
930 fprintf (outfile, ",");
932 fprintf (outfile, "}");
934 break;
936 case REFERENCE:
938 vn_reference_op_t vro;
939 unsigned int i;
940 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
941 fprintf (outfile, "{");
942 for (i = 0;
943 VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
944 i++)
946 bool closebrace = false;
947 if (vro->opcode != SSA_NAME
948 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
950 fprintf (outfile, "%s", tree_code_name [vro->opcode]);
951 if (vro->op0)
953 fprintf (outfile, "<");
954 closebrace = true;
957 if (vro->op0)
959 print_generic_expr (outfile, vro->op0, 0);
960 if (vro->op1)
962 fprintf (outfile, ",");
963 print_generic_expr (outfile, vro->op1, 0);
965 if (vro->op2)
967 fprintf (outfile, ",");
968 print_generic_expr (outfile, vro->op2, 0);
971 if (closebrace)
972 fprintf (outfile, ">");
973 if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
974 fprintf (outfile, ",");
976 fprintf (outfile, "}");
977 if (ref->vuse)
979 fprintf (outfile, "@");
980 print_generic_expr (outfile, ref->vuse, 0);
983 break;
986 void debug_pre_expr (pre_expr);
988 /* Like print_pre_expr but always prints to stderr. */
989 void
990 debug_pre_expr (pre_expr e)
992 print_pre_expr (stderr, e);
993 fprintf (stderr, "\n");
996 /* Print out SET to OUTFILE. */
998 static void
999 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1000 const char *setname, int blockindex)
1002 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1003 if (set)
1005 bool first = true;
1006 unsigned i;
1007 bitmap_iterator bi;
1009 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1011 const pre_expr expr = expression_for_id (i);
1013 if (!first)
1014 fprintf (outfile, ", ");
1015 first = false;
1016 print_pre_expr (outfile, expr);
1018 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1021 fprintf (outfile, " }\n");
1024 void debug_bitmap_set (bitmap_set_t);
1026 void
1027 debug_bitmap_set (bitmap_set_t set)
1029 print_bitmap_set (stderr, set, "debug", 0);
1032 /* Print out the expressions that have VAL to OUTFILE. */
1034 void
1035 print_value_expressions (FILE *outfile, unsigned int val)
1037 bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
1038 if (set)
1040 char s[10];
1041 sprintf (s, "%04d", val);
1042 print_bitmap_set (outfile, set, s, 0);
1047 void
1048 debug_value_expressions (unsigned int val)
1050 print_value_expressions (stderr, val);
1053 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1054 represent it. */
1056 static pre_expr
1057 get_or_alloc_expr_for_constant (tree constant)
1059 unsigned int result_id;
1060 unsigned int value_id;
1061 struct pre_expr_d expr;
1062 pre_expr newexpr;
1064 expr.kind = CONSTANT;
1065 PRE_EXPR_CONSTANT (&expr) = constant;
1066 result_id = lookup_expression_id (&expr);
1067 if (result_id != 0)
1068 return expression_for_id (result_id);
1070 newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1071 newexpr->kind = CONSTANT;
1072 PRE_EXPR_CONSTANT (newexpr) = constant;
1073 alloc_expression_id (newexpr);
1074 value_id = get_or_alloc_constant_value_id (constant);
1075 add_to_value (value_id, newexpr);
1076 return newexpr;
1079 /* Given a value id V, find the actual tree representing the constant
1080 value if there is one, and return it. Return NULL if we can't find
1081 a constant. */
1083 static tree
1084 get_constant_for_value_id (unsigned int v)
1086 if (value_id_constant_p (v))
1088 unsigned int i;
1089 bitmap_iterator bi;
1090 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);
1092 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1094 pre_expr expr = expression_for_id (i);
1095 if (expr->kind == CONSTANT)
1096 return PRE_EXPR_CONSTANT (expr);
1099 return NULL;
1102 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1103 Currently only supports constants and SSA_NAMES. */
1104 static pre_expr
1105 get_or_alloc_expr_for (tree t)
1107 if (TREE_CODE (t) == SSA_NAME)
1108 return get_or_alloc_expr_for_name (t);
1109 else if (is_gimple_min_invariant (t))
1110 return get_or_alloc_expr_for_constant (t);
1111 else
1113 /* More complex expressions can result from SCCVN expression
1114 simplification that inserts values for them. As they all
1115 do not have VOPs, they get handled by the nary ops struct. */
1116 vn_nary_op_t result;
1117 unsigned int result_id;
1118 vn_nary_op_lookup (t, &result);
1119 if (result != NULL)
1121 pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
1122 e->kind = NARY;
1123 PRE_EXPR_NARY (e) = result;
1124 result_id = lookup_expression_id (e);
1125 if (result_id != 0)
1127 pool_free (pre_expr_pool, e);
1128 e = expression_for_id (result_id);
1129 return e;
1131 alloc_expression_id (e);
1132 return e;
1135 return NULL;
1138 /* Return the folded version of E if E, when folded, is a gimple
1139 min_invariant. Otherwise, return E. */
1141 static pre_expr
1142 fully_constant_expression (pre_expr e)
1144 switch (e->kind)
1146 case CONSTANT:
1147 return e;
1148 case NARY:
1150 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1151 switch (TREE_CODE_CLASS (nary->opcode))
1153 case tcc_expression:
1154 if (nary->opcode == TRUTH_NOT_EXPR)
1155 goto do_unary;
1156 if (nary->opcode != TRUTH_AND_EXPR
1157 && nary->opcode != TRUTH_OR_EXPR
1158 && nary->opcode != TRUTH_XOR_EXPR)
1159 return e;
1160 /* Fallthrough. */
1161 case tcc_binary:
1162 case tcc_comparison:
1164 /* We have to go from trees to pre exprs to value ids to
1165 constants. */
1166 tree naryop0 = nary->op[0];
1167 tree naryop1 = nary->op[1];
1168 tree result;
1169 if (!is_gimple_min_invariant (naryop0))
1171 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1172 unsigned int vrep0 = get_expr_value_id (rep0);
1173 tree const0 = get_constant_for_value_id (vrep0);
1174 if (const0)
1175 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1177 if (!is_gimple_min_invariant (naryop1))
1179 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1180 unsigned int vrep1 = get_expr_value_id (rep1);
1181 tree const1 = get_constant_for_value_id (vrep1);
1182 if (const1)
1183 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1185 result = fold_binary (nary->opcode, nary->type,
1186 naryop0, naryop1);
1187 if (result && is_gimple_min_invariant (result))
1188 return get_or_alloc_expr_for_constant (result);
1189 /* We might have simplified the expression to a
1190 SSA_NAME for example from x_1 * 1. But we cannot
1191 insert a PHI for x_1 unconditionally as x_1 might
1192 not be available readily. */
1193 return e;
1195 case tcc_reference:
1196 if (nary->opcode != REALPART_EXPR
1197 && nary->opcode != IMAGPART_EXPR
1198 && nary->opcode != VIEW_CONVERT_EXPR)
1199 return e;
1200 /* Fallthrough. */
1201 case tcc_unary:
1202 do_unary:
1204 /* We have to go from trees to pre exprs to value ids to
1205 constants. */
1206 tree naryop0 = nary->op[0];
1207 tree const0, result;
1208 if (is_gimple_min_invariant (naryop0))
1209 const0 = naryop0;
1210 else
1212 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1213 unsigned int vrep0 = get_expr_value_id (rep0);
1214 const0 = get_constant_for_value_id (vrep0);
1216 result = NULL;
1217 if (const0)
1219 tree type1 = TREE_TYPE (nary->op[0]);
1220 const0 = fold_convert (type1, const0);
1221 result = fold_unary (nary->opcode, nary->type, const0);
1223 if (result && is_gimple_min_invariant (result))
1224 return get_or_alloc_expr_for_constant (result);
1225 return e;
1227 default:
1228 return e;
1231 case REFERENCE:
1233 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1234 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1235 vn_reference_op_t op;
1237 /* Try to simplify the translated expression if it is
1238 a call to a builtin function with at most two arguments. */
1239 op = VEC_index (vn_reference_op_s, operands, 0);
1240 if (op->opcode == CALL_EXPR
1241 && TREE_CODE (op->op0) == ADDR_EXPR
1242 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1243 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1244 && VEC_length (vn_reference_op_s, operands) >= 2
1245 && VEC_length (vn_reference_op_s, operands) <= 3)
1247 vn_reference_op_t arg0, arg1 = NULL;
1248 bool anyconst = false;
1249 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1250 if (VEC_length (vn_reference_op_s, operands) > 2)
1251 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1252 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1253 || (arg0->opcode == ADDR_EXPR
1254 && is_gimple_min_invariant (arg0->op0)))
1255 anyconst = true;
1256 if (arg1
1257 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1258 || (arg1->opcode == ADDR_EXPR
1259 && is_gimple_min_invariant (arg1->op0))))
1260 anyconst = true;
1261 if (anyconst)
1263 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1264 arg1 ? 2 : 1,
1265 arg0->op0,
1266 arg1 ? arg1->op0 : NULL);
1267 if (folded
1268 && TREE_CODE (folded) == NOP_EXPR)
1269 folded = TREE_OPERAND (folded, 0);
1270 if (folded
1271 && is_gimple_min_invariant (folded))
1272 return get_or_alloc_expr_for_constant (folded);
1275 return e;
1277 default:
1278 return e;
1280 return e;
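/* A made-up example of the folding above: for the NARY {PLUS_EXPR, a_1, 5},
   if value numbering knows that a_1's value is the constant 4, then
   get_constant_for_value_id returns 4 and fold_binary produces 9, so the
   expression becomes a CONSTANT pre_expr for 9.  If no constant leader
   exists for an operand, the expression is returned unchanged.  */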
1283 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1284 it has the value it would have in BLOCK. Set *SAME_VALID to true
1285 in case the new vuse doesn't change the value id of the OPERANDS. */
1287 static tree
1288 translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
1289 alias_set_type set, tree type, tree vuse,
1290 basic_block phiblock,
1291 basic_block block, bool *same_valid)
1293 gimple phi = SSA_NAME_DEF_STMT (vuse);
1294 ao_ref ref;
1295 edge e = NULL;
1296 bool use_oracle;
1298 *same_valid = true;
1300 if (gimple_bb (phi) != phiblock)
1301 return vuse;
1303 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1305 /* Use the alias-oracle to find either the PHI node in this block,
1306 the first VUSE used in this block that is equivalent to vuse, or
1307 the first VUSE whose definition in this block kills the value. */
1308 if (gimple_code (phi) == GIMPLE_PHI)
1309 e = find_edge (block, phiblock);
1310 else if (use_oracle)
1311 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1313 vuse = gimple_vuse (phi);
1314 phi = SSA_NAME_DEF_STMT (vuse);
1315 if (gimple_bb (phi) != phiblock)
1316 return vuse;
1317 if (gimple_code (phi) == GIMPLE_PHI)
1319 e = find_edge (block, phiblock);
1320 break;
1323 else
1324 return NULL_TREE;
1326 if (e)
1328 if (use_oracle)
1330 bitmap visited = NULL;
1331 /* Try to find a vuse that dominates this phi node by skipping
1332 non-clobbering statements. */
1333 vuse = get_continuation_for_phi (phi, &ref, &visited);
1334 if (visited)
1335 BITMAP_FREE (visited);
1337 else
1338 vuse = NULL_TREE;
1339 if (!vuse)
1341 /* If we didn't find any, the value ID can't stay the same,
1342 but return the translated vuse. */
1343 *same_valid = false;
1344 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1346 /* ??? We would like to return vuse here as this is the canonical
1347 upmost vdef that this reference is associated with. But during
1348 insertion of the references into the hash tables we only ever
1349 directly insert with their direct gimple_vuse, hence returning
1350 something else would make us not find the other expression. */
1351 return PHI_ARG_DEF (phi, e->dest_idx);
1354 return NULL_TREE;
1357 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1358 SET2. This is used to avoid making a set consisting of the union
1359 of PA_IN and ANTIC_IN during insert. */
1361 static inline pre_expr
1362 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1364 pre_expr result;
1366 result = bitmap_find_leader (set1, val, NULL);
1367 if (!result && set2)
1368 result = bitmap_find_leader (set2, val, NULL);
1369 return result;
1372 /* Get the tree type for our PRE expression e. */
1374 static tree
1375 get_expr_type (const pre_expr e)
1377 switch (e->kind)
1379 case NAME:
1380 return TREE_TYPE (PRE_EXPR_NAME (e));
1381 case CONSTANT:
1382 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1383 case REFERENCE:
1384 return PRE_EXPR_REFERENCE (e)->type;
1385 case NARY:
1386 return PRE_EXPR_NARY (e)->type;
1388 gcc_unreachable();
1391 /* Get a representative SSA_NAME for a given expression.
1392 Since all of our sub-expressions are treated as values, we require
1393 them to be SSA_NAME's for simplicity.
1394 Prior versions of GVNPRE used to use "value handles" here, so that
1395 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1396 either case, the operands are really values (IE we do not expect
1397 them to be usable without finding leaders). */
1399 static tree
1400 get_representative_for (const pre_expr e)
1402 tree exprtype;
1403 tree name;
1404 unsigned int value_id = get_expr_value_id (e);
1406 switch (e->kind)
1408 case NAME:
1409 return PRE_EXPR_NAME (e);
1410 case CONSTANT:
1411 return PRE_EXPR_CONSTANT (e);
1412 case NARY:
1413 case REFERENCE:
1415 /* Go through all of the expressions representing this value
1416 and pick out an SSA_NAME. */
1417 unsigned int i;
1418 bitmap_iterator bi;
1419 bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
1420 value_id);
1421 FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
1423 pre_expr rep = expression_for_id (i);
1424 if (rep->kind == NAME)
1425 return PRE_EXPR_NAME (rep);
1428 break;
1430 /* If we reached here we couldn't find an SSA_NAME. This can
1431 happen when we've discovered a value that has never appeared in
1432 the program as set to an SSA_NAME, most likely as the result of
1433 phi translation. */
1434 if (dump_file)
1436 fprintf (dump_file,
1437 "Could not find SSA_NAME representative for expression:");
1438 print_pre_expr (dump_file, e);
1439 fprintf (dump_file, "\n");
1442 exprtype = get_expr_type (e);
1444 /* Build and insert the assignment of the end result to the temporary
1445 that we will return. */
1446 if (!pretemp || exprtype != TREE_TYPE (pretemp))
1448 pretemp = create_tmp_var (exprtype, "pretmp");
1449 get_var_ann (pretemp);
1452 name = make_ssa_name (pretemp, gimple_build_nop ());
1453 VN_INFO_GET (name)->value_id = value_id;
1454 if (e->kind == CONSTANT)
1455 VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
1456 else
1457 VN_INFO (name)->valnum = name;
1459 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1460 if (dump_file)
1462 fprintf (dump_file, "Created SSA_NAME representative ");
1463 print_generic_expr (dump_file, name, 0);
1464 fprintf (dump_file, " for expression:");
1465 print_pre_expr (dump_file, e);
1466 fprintf (dump_file, "\n");
1469 return name;
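/* Hypothetical case where this matters: phi translating a_2 + b_3 into a
   predecessor may yield a_5 + b_3, a value that was never computed
   anywhere in the program and hence has no SSA_NAME.  The code above then
   manufactures a fresh "pretmp" SSA_NAME carrying that value_id so the
   value can still be used as an operand of newly built expressions.  */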
1474 static pre_expr
1475 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1476 basic_block pred, basic_block phiblock);
1478 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1479 the phis in PRED. Return NULL if we can't find a leader for each part
1480 of the translated expression. */
1482 static pre_expr
1483 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1484 basic_block pred, basic_block phiblock)
1486 switch (expr->kind)
1488 case NARY:
1490 unsigned int i;
1491 bool changed = false;
1492 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1493 struct vn_nary_op_s newnary;
1494 /* The NARY structure is only guaranteed to have been
1495 allocated to the nary->length operands. */
1496 memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
1497 - sizeof (tree) * (4 - nary->length)));
1499 for (i = 0; i < newnary.length; i++)
1501 if (TREE_CODE (newnary.op[i]) != SSA_NAME)
1502 continue;
1503 else
1505 pre_expr leader, result;
1506 unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
1507 leader = find_leader_in_sets (op_val_id, set1, set2);
1508 result = phi_translate (leader, set1, set2, pred, phiblock);
1509 if (result && result != leader)
1511 tree name = get_representative_for (result);
1512 if (!name)
1513 return NULL;
1514 newnary.op[i] = name;
1516 else if (!result)
1517 return NULL;
1519 changed |= newnary.op[i] != nary->op[i];
1522 if (changed)
1524 pre_expr constant;
1525 unsigned int new_val_id;
1527 tree result = vn_nary_op_lookup_pieces (newnary.length,
1528 newnary.opcode,
1529 newnary.type,
1530 newnary.op[0],
1531 newnary.op[1],
1532 newnary.op[2],
1533 newnary.op[3],
1534 &nary);
1535 if (result && is_gimple_min_invariant (result))
1536 return get_or_alloc_expr_for_constant (result);
1538 expr = (pre_expr) pool_alloc (pre_expr_pool);
1539 expr->kind = NARY;
1540 expr->id = 0;
1541 if (nary)
1543 PRE_EXPR_NARY (expr) = nary;
1544 constant = fully_constant_expression (expr);
1545 if (constant != expr)
1546 return constant;
1548 new_val_id = nary->value_id;
1549 get_or_alloc_expression_id (expr);
1551 else
1553 new_val_id = get_next_value_id ();
1554 VEC_safe_grow_cleared (bitmap_set_t, heap,
1555 value_expressions,
1556 get_max_value_id() + 1);
1557 nary = vn_nary_op_insert_pieces (newnary.length,
1558 newnary.opcode,
1559 newnary.type,
1560 newnary.op[0],
1561 newnary.op[1],
1562 newnary.op[2],
1563 newnary.op[3],
1564 result, new_val_id);
1565 PRE_EXPR_NARY (expr) = nary;
1566 constant = fully_constant_expression (expr);
1567 if (constant != expr)
1568 return constant;
1569 get_or_alloc_expression_id (expr);
1571 add_to_value (new_val_id, expr);
1573 return expr;
1575 break;
1577 case REFERENCE:
1579 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1580 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1581 tree vuse = ref->vuse;
1582 tree newvuse = vuse;
1583 VEC (vn_reference_op_s, heap) *newoperands = NULL;
1584 bool changed = false, same_valid = true;
1585 unsigned int i, j;
1586 vn_reference_op_t operand;
1587 vn_reference_t newref;
1589 for (i = 0, j = 0;
1590 VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
1592 pre_expr opresult;
1593 pre_expr leader;
1594 tree oldop0 = operand->op0;
1595 tree oldop1 = operand->op1;
1596 tree oldop2 = operand->op2;
1597 tree op0 = oldop0;
1598 tree op1 = oldop1;
1599 tree op2 = oldop2;
1600 tree type = operand->type;
1601 vn_reference_op_s newop = *operand;
1603 if (op0 && TREE_CODE (op0) == SSA_NAME)
1605 unsigned int op_val_id = VN_INFO (op0)->value_id;
1606 leader = find_leader_in_sets (op_val_id, set1, set2);
1607 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1608 if (opresult && opresult != leader)
1610 tree name = get_representative_for (opresult);
1611 if (!name)
1612 break;
1613 op0 = name;
1615 else if (!opresult)
1616 break;
1618 changed |= op0 != oldop0;
1620 if (op1 && TREE_CODE (op1) == SSA_NAME)
1622 unsigned int op_val_id = VN_INFO (op1)->value_id;
1623 leader = find_leader_in_sets (op_val_id, set1, set2);
1624 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1625 if (opresult && opresult != leader)
1627 tree name = get_representative_for (opresult);
1628 if (!name)
1629 break;
1630 op1 = name;
1632 else if (!opresult)
1633 break;
1635 /* We can't possibly insert these. */
1636 else if (op1 && !is_gimple_min_invariant (op1))
1637 break;
1638 changed |= op1 != oldop1;
1639 if (op2 && TREE_CODE (op2) == SSA_NAME)
1641 unsigned int op_val_id = VN_INFO (op2)->value_id;
1642 leader = find_leader_in_sets (op_val_id, set1, set2);
1643 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1644 if (opresult && opresult != leader)
1646 tree name = get_representative_for (opresult);
1647 if (!name)
1648 break;
1649 op2 = name;
1651 else if (!opresult)
1652 break;
1654 /* We can't possibly insert these. */
1655 else if (op2 && !is_gimple_min_invariant (op2))
1656 break;
1657 changed |= op2 != oldop2;
1659 if (!newoperands)
1660 newoperands = VEC_copy (vn_reference_op_s, heap, operands);
1661 /* We may have changed from an SSA_NAME to a constant */
1662 if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
1663 newop.opcode = TREE_CODE (op0);
1664 newop.type = type;
1665 newop.op0 = op0;
1666 newop.op1 = op1;
1667 newop.op2 = op2;
1668 VEC_replace (vn_reference_op_s, newoperands, j, &newop);
1669 /* If it transforms from an SSA_NAME to an address, fold with
1670 a preceding indirect reference. */
1671 if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
1672 && VEC_index (vn_reference_op_s,
1673 newoperands, j - 1)->opcode == INDIRECT_REF)
1674 vn_reference_fold_indirect (&newoperands, &j);
1676 if (i != VEC_length (vn_reference_op_s, operands))
1678 if (newoperands)
1679 VEC_free (vn_reference_op_s, heap, newoperands);
1680 return NULL;
1683 if (vuse)
1685 newvuse = translate_vuse_through_block (newoperands,
1686 ref->set, ref->type,
1687 vuse, phiblock, pred,
1688 &same_valid);
1689 if (newvuse == NULL_TREE)
1691 VEC_free (vn_reference_op_s, heap, newoperands);
1692 return NULL;
1696 if (changed || newvuse != vuse)
1698 unsigned int new_val_id;
1699 pre_expr constant;
1701 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1702 ref->type,
1703 newoperands,
1704 &newref, true);
1705 if (newref)
1706 VEC_free (vn_reference_op_s, heap, newoperands);
1708 if (result && is_gimple_min_invariant (result))
1710 gcc_assert (!newoperands);
1711 return get_or_alloc_expr_for_constant (result);
1714 expr = (pre_expr) pool_alloc (pre_expr_pool);
1715 expr->kind = REFERENCE;
1716 expr->id = 0;
1718 if (newref)
1720 PRE_EXPR_REFERENCE (expr) = newref;
1721 constant = fully_constant_expression (expr);
1722 if (constant != expr)
1723 return constant;
1725 new_val_id = newref->value_id;
1726 get_or_alloc_expression_id (expr);
1728 else
1730 if (changed || !same_valid)
1732 new_val_id = get_next_value_id ();
1733 VEC_safe_grow_cleared (bitmap_set_t, heap,
1734 value_expressions,
1735 get_max_value_id() + 1);
1737 else
1738 new_val_id = ref->value_id;
1739 newref = vn_reference_insert_pieces (newvuse, ref->set,
1740 ref->type,
1741 newoperands,
1742 result, new_val_id);
1743 newoperands = NULL;
1744 PRE_EXPR_REFERENCE (expr) = newref;
1745 constant = fully_constant_expression (expr);
1746 if (constant != expr)
1747 return constant;
1748 get_or_alloc_expression_id (expr);
1750 add_to_value (new_val_id, expr);
1752 VEC_free (vn_reference_op_s, heap, newoperands);
1753 return expr;
1755 break;
1757 case NAME:
1759 gimple phi = NULL;
1760 edge e;
1761 gimple def_stmt;
1762 tree name = PRE_EXPR_NAME (expr);
1764 def_stmt = SSA_NAME_DEF_STMT (name);
1765 if (gimple_code (def_stmt) == GIMPLE_PHI
1766 && gimple_bb (def_stmt) == phiblock)
1767 phi = def_stmt;
1768 else
1769 return expr;
1771 e = find_edge (pred, gimple_bb (phi));
1772 if (e)
1774 tree def = PHI_ARG_DEF (phi, e->dest_idx);
1775 pre_expr newexpr;
1777 if (TREE_CODE (def) == SSA_NAME)
1778 def = VN_INFO (def)->valnum;
1780 /* Handle constant. */
1781 if (is_gimple_min_invariant (def))
1782 return get_or_alloc_expr_for_constant (def);
1784 if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
1785 return NULL;
1787 newexpr = get_or_alloc_expr_for_name (def);
1788 return newexpr;
1791 return expr;
1793 default:
1794 gcc_unreachable ();
1798 /* Wrapper around phi_translate_1 providing caching functionality. */
1800 static pre_expr
1801 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1802 basic_block pred, basic_block phiblock)
1804 pre_expr phitrans;
1806 if (!expr)
1807 return NULL;
1809 /* Constants contain no values that need translation. */
1810 if (expr->kind == CONSTANT)
1811 return expr;
1813 if (value_id_constant_p (get_expr_value_id (expr)))
1814 return expr;
1816 if (expr->kind != NAME)
1818 phitrans = phi_trans_lookup (expr, pred);
1819 if (phitrans)
1820 return phitrans;
1823 /* Translate. */
1824 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1826 /* Don't add empty translations to the cache. Don't add translations
1827 of NAMEs either, as those are cheap to translate. */
1828 if (phitrans
1829 && expr->kind != NAME)
1830 phi_trans_add (expr, phitrans, pred);
1832 return phitrans;
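/* Sketch of a translation, with invented names: given the PHI node
   a_1 = PHI <a_7(pred1), a_9(pred2)> in PHIBLOCK, translating the NARY
   a_1 + b_3 towards pred2 substitutes a_9's value for a_1's, producing
   a_9 + b_3, which is looked up in (or freshly inserted into) the VN
   tables.  NAME expressions themselves translate directly through the
   PHI argument and are not cached, since that translation is cheap.  */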
1836 /* For each expression in SET, translate the values through phi nodes
1837 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1838 expressions in DEST. */
1840 static void
1841 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1842 basic_block phiblock)
1844 VEC (pre_expr, heap) *exprs;
1845 pre_expr expr;
1846 int i;
1848 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1850 bitmap_set_copy (dest, set);
1851 return;
1854 exprs = sorted_array_from_bitmap_set (set);
1855 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
1857 pre_expr translated;
1858 translated = phi_translate (expr, set, NULL, pred, phiblock);
1859 if (!translated)
1860 continue;
1862 /* We might end up with multiple expressions from SET being
1863 translated to the same value. In this case we do not want
1864 to retain the NARY or REFERENCE expression but prefer a NAME
1865 which would be the leader. */
1866 if (translated->kind == NAME)
1867 bitmap_value_replace_in_set (dest, translated);
1868 else
1869 bitmap_value_insert_into_set (dest, translated);
1871 VEC_free (pre_expr, heap, exprs);
1874 /* Find the leader for a value (i.e., the name representing that
1875 value) in a given set, and return it. If STMT is non-NULL it
1876 makes sure the defining statement for the leader dominates it.
1877 Return NULL if no leader is found. */
1879 static pre_expr
1880 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
1882 if (value_id_constant_p (val))
1884 unsigned int i;
1885 bitmap_iterator bi;
1886 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1888 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1890 pre_expr expr = expression_for_id (i);
1891 if (expr->kind == CONSTANT)
1892 return expr;
1895 if (bitmap_set_contains_value (set, val))
1897 /* Rather than walk the entire bitmap of expressions, and see
1898 whether any of them has the value we are looking for, we look
1899 at the reverse mapping, which tells us the set of expressions
1900 that have a given value (IE value->expressions with that
1901 value) and see if any of those expressions are in our set.
1902 The number of expressions per value is usually significantly
1903 less than the number of expressions in the set. In fact, for
1904 large testcases, doing it this way is roughly 5-10x faster
1905 than walking the bitmap.
1906 If this is somehow a significant loss for some cases, we can
1907 choose which set to walk based on which set is smaller. */
1908 unsigned int i;
1909 bitmap_iterator bi;
1910 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1912 EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
1913 set->expressions, 0, i, bi)
1915 pre_expr val = expression_for_id (i);
1916 /* At the point where stmt is not null, there should always
1917 be an SSA_NAME first in the list of expressions. */
1918 if (stmt)
1920 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
1921 if (gimple_code (def_stmt) != GIMPLE_PHI
1922 && gimple_bb (def_stmt) == gimple_bb (stmt)
1923 && gimple_uid (def_stmt) >= gimple_uid (stmt))
1924 continue;
1926 return val;
1929 return NULL;
1932 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1933 BLOCK by seeing if it is not killed in the block. Note that we are
1934 only determining whether there is a store that kills it. Because
1935 of the order in which clean iterates over values, we are guaranteed
1936 that altered operands will have caused us to be eliminated from the
1937 ANTIC_IN set already. */
1939 static bool
1940 value_dies_in_block_x (pre_expr expr, basic_block block)
1942 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1943 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1944 gimple def;
1945 gimple_stmt_iterator gsi;
1946 unsigned id = get_expression_id (expr);
1947 bool res = false;
1948 ao_ref ref;
1950 if (!vuse)
1951 return false;
1953 /* Lookup a previously calculated result. */
1954 if (EXPR_DIES (block)
1955 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1956 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1958 /* A memory expression {e, VUSE} dies in the block if there is a
1959 statement that may clobber e. If, starting statement walk from the
1960 top of the basic block, a statement uses VUSE there can be no kill
1961 in between that use and the original statement that loaded {e, VUSE},
1962 so we can stop walking. */
1963 ref.base = NULL_TREE;
1964 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1966 tree def_vuse, def_vdef;
1967 def = gsi_stmt (gsi);
1968 def_vuse = gimple_vuse (def);
1969 def_vdef = gimple_vdef (def);
1971 /* Not a memory statement. */
1972 if (!def_vuse)
1973 continue;
1975 /* Not a may-def. */
1976 if (!def_vdef)
1978 /* A load with the same VUSE, we're done. */
1979 if (def_vuse == vuse)
1980 break;
1982 continue;
1985 /* Init ref only if we really need it. */
1986 if (ref.base == NULL_TREE
1987 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1988 refx->operands))
1990 res = true;
1991 break;
1993 /* If the statement may clobber expr, it dies. */
1994 if (stmt_may_clobber_ref_p_1 (def, &ref))
1996 res = true;
1997 break;
2001 /* Remember the result. */
2002 if (!EXPR_DIES (block))
2003 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
2004 bitmap_set_bit (EXPR_DIES (block), id * 2);
2005 if (res)
2006 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
2008 return res;
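/* A minimal sketch of the EXPR_DIES encoding used above, with a made-up
   expression id: for ID == 3, bit 6 set means the answer for ID is
   cached in this block, and bit 7 set means the cached answer is
   "the value dies".  In general bit 2*ID records "computed" and
   bit 2*ID + 1 records the result.  */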
2012 #define union_contains_value(SET1, SET2, VAL) \
2013 (bitmap_set_contains_value ((SET1), (VAL)) \
2014 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
2016 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.  */
2018 static bool
2019 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
2020 vn_reference_op_t vro)
2022 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
2024 struct pre_expr_d temp;
2025 temp.kind = NAME;
2026 temp.id = 0;
2027 PRE_EXPR_NAME (&temp) = vro->op0;
2028 temp.id = lookup_expression_id (&temp);
2029 if (temp.id == 0)
2030 return false;
2031 if (!union_contains_value (set1, set2,
2032 get_expr_value_id (&temp)))
2033 return false;
2035 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
2037 struct pre_expr_d temp;
2038 temp.kind = NAME;
2039 temp.id = 0;
2040 PRE_EXPR_NAME (&temp) = vro->op1;
2041 temp.id = lookup_expression_id (&temp);
2042 if (temp.id == 0)
2043 return false;
2044 if (!union_contains_value (set1, set2,
2045 get_expr_value_id (&temp)))
2046 return false;
2049 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2051 struct pre_expr_d temp;
2052 temp.kind = NAME;
2053 temp.id = 0;
2054 PRE_EXPR_NAME (&temp) = vro->op2;
2055 temp.id = lookup_expression_id (&temp);
2056 if (temp.id == 0)
2057 return false;
2058 if (!union_contains_value (set1, set2,
2059 get_expr_value_id (&temp)))
2060 return false;
2063 return true;
2066 /* Determine if the expression EXPR is valid in SET1 U SET2.
2067 ONLY SET2 CAN BE NULL.
2068 This means that we have a leader for each part of the expression
2069 (if it consists of values), or the expression is an SSA_NAME.
2070 For loads/calls, we also see if the vuse is killed in this block. */
2072 static bool
2073 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2074 basic_block block)
2076 switch (expr->kind)
2078 case NAME:
2079 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2080 case NARY:
2082 unsigned int i;
2083 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2084 for (i = 0; i < nary->length; i++)
2086 if (TREE_CODE (nary->op[i]) == SSA_NAME)
2088 struct pre_expr_d temp;
2089 temp.kind = NAME;
2090 temp.id = 0;
2091 PRE_EXPR_NAME (&temp) = nary->op[i];
2092 temp.id = lookup_expression_id (&temp);
2093 if (temp.id == 0)
2094 return false;
2095 if (!union_contains_value (set1, set2,
2096 get_expr_value_id (&temp)))
2097 return false;
2100 /* If the NARY may trap, make sure the block does not contain
2101 a possible exit point.
2102 ??? This is overly conservative if we translate AVAIL_OUT
2103 as the available expression might be after the exit point. */
2104 if (BB_MAY_NOTRETURN (block)
2105 && vn_nary_may_trap (nary))
2106 return false;
2107 return true;
2109 break;
2110 case REFERENCE:
2112 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2113 vn_reference_op_t vro;
2114 unsigned int i;
2116 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
2118 if (!vro_valid_in_sets (set1, set2, vro))
2119 return false;
2121 if (ref->vuse)
2123 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2124 if (!gimple_nop_p (def_stmt)
2125 && gimple_bb (def_stmt) != block
2126 && !dominated_by_p (CDI_DOMINATORS,
2127 block, gimple_bb (def_stmt)))
2128 return false;
2130 return !value_dies_in_block_x (expr, block);
2132 default:
2133 gcc_unreachable ();
2137 /* Clean the set of expressions that are no longer valid in SET1 or
2138 SET2. This means expressions that are made up of values we have no
2139 leaders for in SET1 or SET2. This version is used for partial
2140 anticipation, where an expression need not be valid in ANTIC_IN or
2141 PA_IN alone; a leader in either set suffices. */
2143 static void
2144 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2146 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2147 pre_expr expr;
2148 int i;
2150 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2152 if (!valid_in_sets (set1, set2, expr, block))
2153 bitmap_remove_from_set (set1, expr);
2155 VEC_free (pre_expr, heap, exprs);
2158 /* Clean the set of expressions that are no longer valid in SET. This
2159 means expressions that are made up of values we have no leaders for
2160 in SET. */
2162 static void
2163 clean (bitmap_set_t set, basic_block block)
2165 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2166 pre_expr expr;
2167 int i;
2169 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2171 if (!valid_in_sets (set, NULL, expr, block))
2172 bitmap_remove_from_set (set, expr);
2174 VEC_free (pre_expr, heap, exprs);
2177 static sbitmap has_abnormal_preds;
2179 /* List of blocks that may have changed during ANTIC computation and
2180 thus need to be iterated over. */
2182 static sbitmap changed_blocks;
2184 /* Decide whether to defer a block for a later iteration, or PHI
2185 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2186 should defer the block, and true if we processed it. */
2188 static bool
2189 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2190 basic_block block, basic_block phiblock)
2192 if (!BB_VISITED (phiblock))
2194 SET_BIT (changed_blocks, block->index);
2195 BB_VISITED (block) = 0;
2196 BB_DEFERRED (block) = 1;
2197 return false;
2199 else
2200 phi_translate_set (dest, source, block, phiblock);
2201 return true;
2204 /* Compute the ANTIC set for BLOCK.
2206 If succs(BLOCK) > 1 then
2207 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2208 else if succs(BLOCK) == 1 then
2209 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2211 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])  */
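/* A small worked example, assuming a diamond B0 -> {B1, B2} -> B3
   (the block names are purely illustrative):

   ANTIC_OUT[B0] = ANTIC_IN[B1] intersect ANTIC_IN[B2]
   ANTIC_IN[B0]  = clean (ANTIC_OUT[B0] U EXP_GEN[B0] - TMP_GEN[B0])

   so ANTIC_IN[B0] collects what B0 itself computes together with what
   every successor anticipates, minus the values B0 defines, and clean
   then drops expressions whose operands have no leader.  */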
2214 static bool
2215 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2217 bool changed = false;
2218 bitmap_set_t S, old, ANTIC_OUT;
2219 bitmap_iterator bi;
2220 unsigned int bii;
2221 edge e;
2222 edge_iterator ei;
2224 old = ANTIC_OUT = S = NULL;
2225 BB_VISITED (block) = 1;
2227 /* If any edges from predecessors are abnormal, antic_in is empty,
2228 so do nothing. */
2229 if (block_has_abnormal_pred_edge)
2230 goto maybe_dump_sets;
2232 old = ANTIC_IN (block);
2233 ANTIC_OUT = bitmap_set_new ();
2235 /* If the block has no successors, ANTIC_OUT is empty. */
2236 if (EDGE_COUNT (block->succs) == 0)
2238 /* If we have one successor, we could have some phi nodes to
2239 translate through. */
2240 else if (single_succ_p (block))
2242 basic_block succ_bb = single_succ (block);
2244 /* We trade iterations of the dataflow equations for having to
2245 phi translate the maximal set, which is incredibly slow
2246 (since the maximal set often has 300+ members, even when you
2247 have a small number of blocks).
2248 Basically, we defer the computation of ANTIC for this block
2249 until we have processed its successor, which will inevitably
2250 have a *much* smaller set of values to phi translate once
2251 clean has been run on it.
2252 The cost of doing this is that we technically perform more
2253 iterations; however, they are lower-cost iterations.
2255 Timings for PRE on tramp3d-v4:
2256 without maximal set fix: 11 seconds
2257 with maximal set fix/without deferring: 26 seconds
2258 with maximal set fix/with deferring: 11 seconds  */
2261 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2262 block, succ_bb))
2264 changed = true;
2265 goto maybe_dump_sets;
2268 /* If we have multiple successors, we take the intersection of all of
2269 them. Note that in the case of loop exit phi nodes, we may have
2270 phis to translate through. */
2271 else
2273 VEC(basic_block, heap) * worklist;
2274 size_t i;
2275 basic_block bprime, first = NULL;
2277 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2278 FOR_EACH_EDGE (e, ei, block->succs)
2280 if (!first
2281 && BB_VISITED (e->dest))
2282 first = e->dest;
2283 else if (BB_VISITED (e->dest))
2284 VEC_quick_push (basic_block, worklist, e->dest);
2287 /* Of the multiple successors we have to have visited at least one already. */
2288 if (!first)
2290 SET_BIT (changed_blocks, block->index);
2291 BB_VISITED (block) = 0;
2292 BB_DEFERRED (block) = 1;
2293 changed = true;
2294 VEC_free (basic_block, heap, worklist);
2295 goto maybe_dump_sets;
2298 if (!gimple_seq_empty_p (phi_nodes (first)))
2299 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2300 else
2301 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2303 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2305 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2307 bitmap_set_t tmp = bitmap_set_new ();
2308 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2309 bitmap_set_and (ANTIC_OUT, tmp);
2310 bitmap_set_free (tmp);
2312 else
2313 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2315 VEC_free (basic_block, heap, worklist);
2318 /* Generate ANTIC_OUT - TMP_GEN. */
2319 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2321 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2322 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2323 TMP_GEN (block));
2325 /* Then union in the ANTIC_OUT - TMP_GEN values,
2326 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2327 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2328 bitmap_value_insert_into_set (ANTIC_IN (block),
2329 expression_for_id (bii));
2331 clean (ANTIC_IN (block), block);
2333 /* !old->expressions can happen when we deferred a block. */
2334 if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
2336 changed = true;
2337 SET_BIT (changed_blocks, block->index);
2338 FOR_EACH_EDGE (e, ei, block->preds)
2339 SET_BIT (changed_blocks, e->src->index);
2341 else
2342 RESET_BIT (changed_blocks, block->index);
2344 maybe_dump_sets:
2345 if (dump_file && (dump_flags & TDF_DETAILS))
2347 if (!BB_DEFERRED (block) || BB_VISITED (block))
2349 if (ANTIC_OUT)
2350 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2352 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2353 block->index);
2355 if (S)
2356 print_bitmap_set (dump_file, S, "S", block->index);
2358 else
2360 fprintf (dump_file,
2361 "Block %d was deferred for a future iteration.\n",
2362 block->index);
2365 if (old)
2366 bitmap_set_free (old);
2367 if (S)
2368 bitmap_set_free (S);
2369 if (ANTIC_OUT)
2370 bitmap_set_free (ANTIC_OUT);
2371 return changed;
2374 /* Compute PARTIAL_ANTIC for BLOCK.
2376 If succs(BLOCK) > 1 then
2377 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2378 in ANTIC_OUT for all succ(BLOCK)
2379 else if succs(BLOCK) == 1 then
2380 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2382 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2383 - ANTIC_IN[BLOCK])  */
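/* A small worked example, assuming B0 -> {B1, B2} with no back edges
   (the block names are purely illustrative):

   PA_OUT[B0] = PA_IN[B1] U ANTIC_IN[B1] U PA_IN[B2] U ANTIC_IN[B2]
   PA_IN[B0]  = dependent_clean (PA_OUT[B0] - TMP_GEN[B0] - ANTIC_IN[B0])

   i.e. unlike ANTIC this is a union, so a value only has to be
   anticipatable along one successor path to become partially
   anticipatable here.  */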
2386 static bool
2387 compute_partial_antic_aux (basic_block block,
2388 bool block_has_abnormal_pred_edge)
2390 bool changed = false;
2391 bitmap_set_t old_PA_IN;
2392 bitmap_set_t PA_OUT;
2393 edge e;
2394 edge_iterator ei;
2395 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2397 old_PA_IN = PA_OUT = NULL;
2399 /* If any edges from predecessors are abnormal, antic_in is empty,
2400 so do nothing. */
2401 if (block_has_abnormal_pred_edge)
2402 goto maybe_dump_sets;
2404 /* If there are too many partially anticipatable values in the
2405 block, phi_translate_set can take exponential time: stop
2406 before the translation starts. */
2407 if (max_pa
2408 && single_succ_p (block)
2409 && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
2410 goto maybe_dump_sets;
2412 old_PA_IN = PA_IN (block);
2413 PA_OUT = bitmap_set_new ();
2415 /* If the block has no successors, ANTIC_OUT is empty. */
2416 if (EDGE_COUNT (block->succs) == 0)
2418 /* If we have one successor, we could have some phi nodes to
2419 translate through. Note that we can't phi translate across DFS
2420 back edges in partial antic, because it uses a union operation on
2421 the successors. For recurrences like IVs, we would end up
2422 generating a new value in the set on each iteration (i + 3 (VH.1),
2423 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2424 else if (single_succ_p (block))
2426 basic_block succ = single_succ (block);
2427 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2428 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2430 /* If we have multiple successors, we take the union of all of
2431 them. */
2432 else
2434 VEC(basic_block, heap) * worklist;
2435 size_t i;
2436 basic_block bprime;
2438 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2439 FOR_EACH_EDGE (e, ei, block->succs)
2441 if (e->flags & EDGE_DFS_BACK)
2442 continue;
2443 VEC_quick_push (basic_block, worklist, e->dest);
2445 if (VEC_length (basic_block, worklist) > 0)
2447 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2449 unsigned int i;
2450 bitmap_iterator bi;
2452 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2453 bitmap_value_insert_into_set (PA_OUT,
2454 expression_for_id (i));
2455 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2457 bitmap_set_t pa_in = bitmap_set_new ();
2458 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2459 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2460 bitmap_value_insert_into_set (PA_OUT,
2461 expression_for_id (i));
2462 bitmap_set_free (pa_in);
2464 else
2465 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2466 bitmap_value_insert_into_set (PA_OUT,
2467 expression_for_id (i));
2470 VEC_free (basic_block, heap, worklist);
2473 /* PA_IN starts with PA_OUT - TMP_GEN.
2474 Then we subtract things from ANTIC_IN. */
2475 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2477 /* For partial antic, we want to put back in the phi results, since
2478 we will properly avoid making them partially antic over backedges. */
2479 bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
2480 bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
2482 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2483 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2485 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2487 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2489 changed = true;
2490 SET_BIT (changed_blocks, block->index);
2491 FOR_EACH_EDGE (e, ei, block->preds)
2492 SET_BIT (changed_blocks, e->src->index);
2494 else
2495 RESET_BIT (changed_blocks, block->index);
2497 maybe_dump_sets:
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2500 if (PA_OUT)
2501 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2503 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2505 if (old_PA_IN)
2506 bitmap_set_free (old_PA_IN);
2507 if (PA_OUT)
2508 bitmap_set_free (PA_OUT);
2509 return changed;
2512 /* Compute ANTIC and partial ANTIC sets. */
2514 static void
2515 compute_antic (void)
2517 bool changed = true;
2518 int num_iterations = 0;
2519 basic_block block;
2520 int i;
2522 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2523 We pre-build the map of blocks with incoming abnormal edges here. */
2524 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2525 sbitmap_zero (has_abnormal_preds);
2527 FOR_EACH_BB (block)
2529 edge_iterator ei;
2530 edge e;
2532 FOR_EACH_EDGE (e, ei, block->preds)
2534 e->flags &= ~EDGE_DFS_BACK;
2535 if (e->flags & EDGE_ABNORMAL)
2537 SET_BIT (has_abnormal_preds, block->index);
2538 break;
2542 BB_VISITED (block) = 0;
2543 BB_DEFERRED (block) = 0;
2545 /* While we are here, give empty ANTIC_IN sets to each block. */
2546 ANTIC_IN (block) = bitmap_set_new ();
2547 PA_IN (block) = bitmap_set_new ();
2550 /* At the exit block we anticipate nothing. */
2551 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2552 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2553 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2555 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2556 sbitmap_ones (changed_blocks);
2557 while (changed)
2559 if (dump_file && (dump_flags & TDF_DETAILS))
2560 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2561 num_iterations++;
2562 changed = false;
2563 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2565 if (TEST_BIT (changed_blocks, postorder[i]))
2567 basic_block block = BASIC_BLOCK (postorder[i]);
2568 changed |= compute_antic_aux (block,
2569 TEST_BIT (has_abnormal_preds,
2570 block->index));
2573 #ifdef ENABLE_CHECKING
2574 /* Theoretically possible, but *highly* unlikely. */
2575 gcc_assert (num_iterations < 500);
2576 #endif
2579 statistics_histogram_event (cfun, "compute_antic iterations",
2580 num_iterations);
2582 if (do_partial_partial)
2584 sbitmap_ones (changed_blocks);
2585 mark_dfs_back_edges ();
2586 num_iterations = 0;
2587 changed = true;
2588 while (changed)
2590 if (dump_file && (dump_flags & TDF_DETAILS))
2591 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2592 num_iterations++;
2593 changed = false;
2594 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2596 if (TEST_BIT (changed_blocks, postorder[i]))
2598 basic_block block = BASIC_BLOCK (postorder[i]);
2599 changed
2600 |= compute_partial_antic_aux (block,
2601 TEST_BIT (has_abnormal_preds,
2602 block->index));
2605 #ifdef ENABLE_CHECKING
2606 /* Theoretically possible, but *highly* unlikely. */
2607 gcc_assert (num_iterations < 500);
2608 #endif
2610 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2611 num_iterations);
2613 sbitmap_free (has_abnormal_preds);
2614 sbitmap_free (changed_blocks);
2617 /* Return true if we can value number the call in STMT. This is true
2618 if we have a pure or constant call. */
2620 static bool
2621 can_value_number_call (gimple stmt)
2623 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2624 return true;
2625 return false;
2628 /* Return true if OP is a tree on which we can perform PRE.
2629 This may not match the operations we can value number, but in
2630 a perfect world would. */
2632 static bool
2633 can_PRE_operation (tree op)
2635 return UNARY_CLASS_P (op)
2636 || BINARY_CLASS_P (op)
2637 || COMPARISON_CLASS_P (op)
2638 || TREE_CODE (op) == INDIRECT_REF
2639 || TREE_CODE (op) == COMPONENT_REF
2640 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2641 || TREE_CODE (op) == CALL_EXPR
2642 || TREE_CODE (op) == ARRAY_REF;
2646 /* Inserted expressions are placed onto this worklist, which is used
2647 for performing quick dead code elimination of insertions we made
2648 that didn't turn out to be necessary. */
2649 static VEC(gimple,heap) *inserted_exprs;
2650 static bitmap inserted_phi_names;
2652 /* Pool-allocated fake store expressions are placed onto this
2653 worklist, which, after performing dead code elimination, is walked
2654 to see which expressions need to be put into GC'able memory. */
2655 static VEC(gimple, heap) *need_creation;
2657 /* The actual worker for create_component_ref_by_pieces. */
2659 static tree
2660 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2661 unsigned int *operand, gimple_seq *stmts,
2662 gimple domstmt)
2664 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2665 *operand);
2666 tree genop;
2667 ++*operand;
2668 switch (currop->opcode)
2670 case CALL_EXPR:
2672 tree folded, sc = currop->op1;
2673 unsigned int nargs = 0;
2674 tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2675 ref->operands) - 1);
2676 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2678 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2679 operand, stmts,
2680 domstmt);
2681 nargs++;
2683 folded = build_call_array (currop->type,
2684 TREE_CODE (currop->op0) == FUNCTION_DECL
2685 ? build_fold_addr_expr (currop->op0)
2686 : currop->op0,
2687 nargs, args);
2688 free (args);
2689 if (sc)
2691 pre_expr scexpr = get_or_alloc_expr_for (sc);
2692 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2693 if (!sc)
2694 return NULL_TREE;
2695 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2697 return folded;
2699 break;
2700 case TARGET_MEM_REF:
2702 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2703 *operand);
2704 pre_expr op0expr;
2705 tree genop0 = NULL_TREE;
2706 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2707 stmts, domstmt);
2708 if (!baseop)
2709 return NULL_TREE;
2710 if (currop->op0)
2712 op0expr = get_or_alloc_expr_for (currop->op0);
2713 genop0 = find_or_generate_expression (block, op0expr,
2714 stmts, domstmt);
2715 if (!genop0)
2716 return NULL_TREE;
2718 if (DECL_P (baseop))
2719 return build6 (TARGET_MEM_REF, currop->type,
2720 baseop, NULL_TREE,
2721 genop0, currop->op1, currop->op2,
2722 unshare_expr (nextop->op1));
2723 else
2724 return build6 (TARGET_MEM_REF, currop->type,
2725 NULL_TREE, baseop,
2726 genop0, currop->op1, currop->op2,
2727 unshare_expr (nextop->op1));
2729 break;
2730 case ADDR_EXPR:
2731 if (currop->op0)
2733 gcc_assert (is_gimple_min_invariant (currop->op0));
2734 return currop->op0;
2736 /* Fallthrough. */
2737 case REALPART_EXPR:
2738 case IMAGPART_EXPR:
2739 case VIEW_CONVERT_EXPR:
2741 tree folded;
2742 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2743 operand,
2744 stmts, domstmt);
2745 if (!genop0)
2746 return NULL_TREE;
2747 folded = fold_build1 (currop->opcode, currop->type,
2748 genop0);
2749 return folded;
2751 break;
2752 case ALIGN_INDIRECT_REF:
2753 case MISALIGNED_INDIRECT_REF:
2754 case INDIRECT_REF:
2756 tree folded;
2757 tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2758 operand,
2759 stmts, domstmt);
2760 if (!genop1)
2761 return NULL_TREE;
2762 genop1 = fold_convert (build_pointer_type (currop->type),
2763 genop1);
2765 if (currop->opcode == MISALIGNED_INDIRECT_REF)
2766 folded = fold_build2 (currop->opcode, currop->type,
2767 genop1, currop->op1);
2768 else
2769 folded = fold_build1 (currop->opcode, currop->type,
2770 genop1);
2771 return folded;
2773 break;
2774 case BIT_FIELD_REF:
2776 tree folded;
2777 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2778 stmts, domstmt);
2779 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2780 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2781 tree genop1;
2782 tree genop2;
2784 if (!genop0)
2785 return NULL_TREE;
2786 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2787 if (!genop1)
2788 return NULL_TREE;
2789 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2790 if (!genop2)
2791 return NULL_TREE;
2792 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2793 genop2);
2794 return folded;
2797 /* For array ref vn_reference_op's, operand 1 of the array ref
2798 is op0 of the reference op and operand 3 of the array ref is
2799 op1. */
2800 case ARRAY_RANGE_REF:
2801 case ARRAY_REF:
2803 tree genop0;
2804 tree genop1 = currop->op0;
2805 pre_expr op1expr;
2806 tree genop2 = currop->op1;
2807 pre_expr op2expr;
2808 tree genop3 = currop->op2;
2809 pre_expr op3expr;
2810 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2811 stmts, domstmt);
2812 if (!genop0)
2813 return NULL_TREE;
2814 op1expr = get_or_alloc_expr_for (genop1);
2815 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2816 if (!genop1)
2817 return NULL_TREE;
2818 if (genop2)
2820 /* Drop zero minimum index. */
2821 if (tree_int_cst_equal (genop2, integer_zero_node))
2822 genop2 = NULL_TREE;
2823 else
2825 op2expr = get_or_alloc_expr_for (genop2);
2826 genop2 = find_or_generate_expression (block, op2expr, stmts,
2827 domstmt);
2828 if (!genop2)
2829 return NULL_TREE;
2832 if (genop3)
2834 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2835 /* We can't always put a size in units of the element alignment
2836 here, as the element alignment may not be visible. See
2837 PR43783. Simply drop the element size for constant
2838 sizes. */
2839 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2840 genop3 = NULL_TREE;
2841 else
2843 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2844 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2845 op3expr = get_or_alloc_expr_for (genop3);
2846 genop3 = find_or_generate_expression (block, op3expr, stmts,
2847 domstmt);
2848 if (!genop3)
2849 return NULL_TREE;
2852 return build4 (currop->opcode, currop->type, genop0, genop1,
2853 genop2, genop3);
2855 case COMPONENT_REF:
2857 tree op0;
2858 tree op1;
2859 tree genop2 = currop->op1;
2860 pre_expr op2expr;
2861 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2862 stmts, domstmt);
2863 if (!op0)
2864 return NULL_TREE;
2865 /* op1 should be a FIELD_DECL; those are represented by
2866 themselves. */
2867 op1 = currop->op0;
2868 if (genop2)
2870 op2expr = get_or_alloc_expr_for (genop2);
2871 genop2 = find_or_generate_expression (block, op2expr, stmts,
2872 domstmt);
2873 if (!genop2)
2874 return NULL_TREE;
2877 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2878 genop2);
2880 break;
2881 case SSA_NAME:
2883 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2884 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2885 return genop;
2887 case STRING_CST:
2888 case INTEGER_CST:
2889 case COMPLEX_CST:
2890 case VECTOR_CST:
2891 case REAL_CST:
2892 case CONSTRUCTOR:
2893 case VAR_DECL:
2894 case PARM_DECL:
2895 case CONST_DECL:
2896 case RESULT_DECL:
2897 case FUNCTION_DECL:
2898 return currop->op0;
2900 default:
2901 gcc_unreachable ();
2905 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2906 COMPONENT_REF, INDIRECT_REF or ARRAY_REF portion, because we'd end up
2907 trying to rename aggregates into SSA form directly, which is a no-no.
2909 Thus, this routine doesn't create temporaries; it just builds a
2910 single access expression for the array, calling
2911 find_or_generate_expression to build the innermost pieces.
2913 This function is a subroutine of create_expression_by_pieces, and
2914 should not be called on its own unless you really know what you
2915 are doing. */
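/* Roughly, for a reference like a.b[i].c this rebuilds the whole access
   tree in one piece; only scalar operands such as i are run through
   find_or_generate_expression and may trigger insertions of their own.  */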
2917 static tree
2918 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2919 gimple_seq *stmts, gimple domstmt)
2921 unsigned int op = 0;
2922 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2925 /* Find a leader for an expression, or generate one using
2926 create_expression_by_pieces if it's ANTIC but
2927 complex.
2928 BLOCK is the basic_block we are looking for leaders in.
2929 EXPR is the expression to find a leader or generate for.
2930 STMTS is the statement list to put the inserted expressions on.
2931 Returns the SSA_NAME of the LHS of the generated expression or the
2932 leader.
2933 DOMSTMT if non-NULL is a statement that should be dominated by
2934 all uses in the generated expression. If DOMSTMT is non-NULL this
2935 routine can fail and return NULL_TREE. Otherwise it will assert
2936 on failure. */
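/* A minimal usage sketch (the names are illustrative only):

   gimple_seq stmts = NULL;
   tree op = find_or_generate_expression (block, expr, &stmts, NULL);

   With DOMSTMT == NULL, PRE expects this either to return an existing
   leader or to append the statements computing one to STMTS.  */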
2938 static tree
2939 find_or_generate_expression (basic_block block, pre_expr expr,
2940 gimple_seq *stmts, gimple domstmt)
2942 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2943 get_expr_value_id (expr), domstmt);
2944 tree genop = NULL;
2945 if (leader)
2947 if (leader->kind == NAME)
2948 genop = PRE_EXPR_NAME (leader);
2949 else if (leader->kind == CONSTANT)
2950 genop = PRE_EXPR_CONSTANT (leader);
2953 /* If it's still NULL, it must be a complex expression, so generate
2954 it recursively. Not so for FRE though. */
2955 if (genop == NULL
2956 && !in_fre)
2958 bitmap_set_t exprset;
2959 unsigned int lookfor = get_expr_value_id (expr);
2960 bool handled = false;
2961 bitmap_iterator bi;
2962 unsigned int i;
2964 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2965 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2967 pre_expr temp = expression_for_id (i);
2968 if (temp->kind != NAME)
2970 handled = true;
2971 genop = create_expression_by_pieces (block, temp, stmts,
2972 domstmt,
2973 get_expr_type (expr));
2974 break;
2977 if (!handled && domstmt)
2978 return NULL_TREE;
2980 gcc_assert (handled);
2982 return genop;
2985 #define NECESSARY GF_PLF_1
2987 /* Create an expression in pieces, so that we can handle very complex
2988 expressions that may be ANTIC, but not necessarily GIMPLE.
2989 BLOCK is the basic block the expression will be inserted into,
2990 EXPR is the expression to insert (in value form)
2991 STMTS is a statement list to append the necessary insertions into.
2993 This function will die if we hit some value that shouldn't be
2994 ANTIC but is (IE there is no leader for it, or its components).
2995 This function may also generate expressions that are themselves
2996 partially or fully redundant. Those that are will be either made
2997 fully redundant during the next iteration of insert (for partially
2998 redundant ones), or eliminated by eliminate (for fully redundant
2999 ones).
3001 If DOMSTMT is non-NULL then we make sure that all uses in the
3002 expressions dominate that statement. In this case the function
3003 can return NULL_TREE to signal failure. */
3005 static tree
3006 create_expression_by_pieces (basic_block block, pre_expr expr,
3007 gimple_seq *stmts, gimple domstmt, tree type)
3009 tree temp, name;
3010 tree folded;
3011 gimple_seq forced_stmts = NULL;
3012 unsigned int value_id;
3013 gimple_stmt_iterator gsi;
3014 tree exprtype = type ? type : get_expr_type (expr);
3015 pre_expr nameexpr;
3016 gimple newstmt;
3018 switch (expr->kind)
3020 /* We may hit the NAME/CONSTANT case if we have to convert types
3021 that value numbering saw through. */
3022 case NAME:
3023 folded = PRE_EXPR_NAME (expr);
3024 break;
3025 case CONSTANT:
3026 folded = PRE_EXPR_CONSTANT (expr);
3027 break;
3028 case REFERENCE:
3030 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3031 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3033 break;
3034 case NARY:
3036 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3037 switch (nary->length)
3039 case 2:
3041 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3042 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3043 tree genop1 = find_or_generate_expression (block, op1,
3044 stmts, domstmt);
3045 tree genop2 = find_or_generate_expression (block, op2,
3046 stmts, domstmt);
3047 if (!genop1 || !genop2)
3048 return NULL_TREE;
3049 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3050 may be a constant with the wrong type. */
3051 if (nary->opcode == POINTER_PLUS_EXPR)
3053 genop1 = fold_convert (nary->type, genop1);
3054 genop2 = fold_convert (sizetype, genop2);
3056 else
3058 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3059 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3062 folded = fold_build2 (nary->opcode, nary->type,
3063 genop1, genop2);
3065 break;
3066 case 1:
3068 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3069 tree genop1 = find_or_generate_expression (block, op1,
3070 stmts, domstmt);
3071 if (!genop1)
3072 return NULL_TREE;
3073 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3075 folded = fold_build1 (nary->opcode, nary->type,
3076 genop1);
3078 break;
3079 default:
3080 return NULL_TREE;
3083 break;
3084 default:
3085 return NULL_TREE;
3088 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3089 folded = fold_convert (exprtype, folded);
3091 /* Force the generated expression to be a sequence of GIMPLE
3092 statements.
3093 We have to call unshare_expr because force_gimple_operand may
3094 modify the tree we pass to it. */
3095 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3096 false, NULL);
3098 /* If we have any intermediate expressions, add them to the value sets
3099 and chain them into the instruction stream. */
3100 if (forced_stmts)
3102 gsi = gsi_start (forced_stmts);
3103 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3105 gimple stmt = gsi_stmt (gsi);
3106 tree forcedname = gimple_get_lhs (stmt);
3107 pre_expr nameexpr;
3109 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3110 if (TREE_CODE (forcedname) == SSA_NAME)
3112 VN_INFO_GET (forcedname)->valnum = forcedname;
3113 VN_INFO (forcedname)->value_id = get_next_value_id ();
3114 nameexpr = get_or_alloc_expr_for_name (forcedname);
3115 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3116 if (!in_fre)
3117 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3118 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3120 mark_symbols_for_renaming (stmt);
3122 gimple_seq_add_seq (stmts, forced_stmts);
3125 /* Build and insert the assignment of the end result to the temporary
3126 that we will return. */
3127 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3129 pretemp = create_tmp_var (exprtype, "pretmp");
3130 get_var_ann (pretemp);
3133 temp = pretemp;
3134 add_referenced_var (temp);
3136 if (TREE_CODE (exprtype) == COMPLEX_TYPE
3137 || TREE_CODE (exprtype) == VECTOR_TYPE)
3138 DECL_GIMPLE_REG_P (temp) = 1;
3140 newstmt = gimple_build_assign (temp, folded);
3141 name = make_ssa_name (temp, newstmt);
3142 gimple_assign_set_lhs (newstmt, name);
3143 gimple_set_plf (newstmt, NECESSARY, false);
3145 gimple_seq_add_stmt (stmts, newstmt);
3146 VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
3148 /* All the symbols in the new statement should be put into SSA form. */
3149 mark_symbols_for_renaming (newstmt);
3151 /* Add a value number to the temporary.
3152 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3153 we are creating the expression by pieces, and this particular piece of
3154 the expression may have been represented. There is no harm in replacing
3155 here. */
3156 VN_INFO_GET (name)->valnum = name;
3157 value_id = get_expr_value_id (expr);
3158 VN_INFO (name)->value_id = value_id;
3159 nameexpr = get_or_alloc_expr_for_name (name);
3160 add_to_value (value_id, nameexpr);
3161 if (!in_fre)
3162 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3163 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3165 pre_stats.insertions++;
3166 if (dump_file && (dump_flags & TDF_DETAILS))
3168 fprintf (dump_file, "Inserted ");
3169 print_gimple_stmt (dump_file, newstmt, 0, 0);
3170 fprintf (dump_file, " in predecessor %d\n", block->index);
3173 return name;
3177 /* Returns true if we want to inhibit the insertion of PHI nodes
3178 for the given EXPR in basic block BB (a member of a loop).
3179 We want to do this when we fear that the induction variable we
3180 create might inhibit vectorization. */
3182 static bool
3183 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3185 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3186 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3187 vn_reference_op_t op;
3188 unsigned i;
3190 /* If we aren't going to vectorize we don't inhibit anything. */
3191 if (!flag_tree_vectorize)
3192 return false;
3194 /* Otherwise we inhibit the insertion when the address of the
3195 memory reference is a simple induction variable. In other
3196 cases the vectorizer won't do anything anyway (either it's
3197 loop invariant or a complicated expression). */
3198 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3200 switch (op->opcode)
3202 case ARRAY_REF:
3203 case ARRAY_RANGE_REF:
3204 if (TREE_CODE (op->op0) != SSA_NAME)
3205 break;
3206 /* Fallthru. */
3207 case SSA_NAME:
3209 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3210 affine_iv iv;
3211 /* Default defs are loop invariant. */
3212 if (!defbb)
3213 break;
3214 /* Defined outside this loop, also loop invariant. */
3215 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3216 break;
3217 /* If it's a simple induction variable, inhibit insertion;
3218 the vectorizer might be interested in this one. */
3219 if (simple_iv (bb->loop_father, bb->loop_father,
3220 op->op0, &iv, true))
3221 return true;
3222 /* No simple IV, vectorizer can't do anything, hence no
3223 reason to inhibit the transformation for this operand. */
3224 break;
3226 default:
3227 break;
3230 return false;
3233 /* Insert the to-be-made-available values of expression EXPRNUM for each
3234 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3235 merge the result with a phi node, given the same value number as
3236 NODE. Return true if we have inserted new stuff. */
3238 static bool
3239 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3240 pre_expr *avail)
3242 pre_expr expr = expression_for_id (exprnum);
3243 pre_expr newphi;
3244 unsigned int val = get_expr_value_id (expr);
3245 edge pred;
3246 bool insertions = false;
3247 bool nophi = false;
3248 basic_block bprime;
3249 pre_expr eprime;
3250 edge_iterator ei;
3251 tree type = get_expr_type (expr);
3252 tree temp;
3253 gimple phi;
3255 if (dump_file && (dump_flags & TDF_DETAILS))
3257 fprintf (dump_file, "Found partial redundancy for expression ");
3258 print_pre_expr (dump_file, expr);
3259 fprintf (dump_file, " (%04d)\n", val);
3262 /* Make sure we aren't creating an induction variable. */
3263 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3265 bool firstinsideloop = false;
3266 bool secondinsideloop = false;
3267 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3268 EDGE_PRED (block, 0)->src);
3269 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3270 EDGE_PRED (block, 1)->src);
3271 /* Induction variables only have one edge inside the loop. */
3272 if ((firstinsideloop ^ secondinsideloop)
3273 && (expr->kind != REFERENCE
3274 || inhibit_phi_insertion (block, expr)))
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3277 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3278 nophi = true;
3282 /* Make the necessary insertions. */
3283 FOR_EACH_EDGE (pred, ei, block->preds)
3285 gimple_seq stmts = NULL;
3286 tree builtexpr;
3287 bprime = pred->src;
3288 eprime = avail[bprime->index];
3290 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3292 builtexpr = create_expression_by_pieces (bprime,
3293 eprime,
3294 &stmts, NULL,
3295 type);
3296 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3297 gsi_insert_seq_on_edge (pred, stmts);
3298 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3299 insertions = true;
3301 else if (eprime->kind == CONSTANT)
3303 /* Constants may not have the right type; fold_convert
3304 should give us back a constant with the right type. */
3306 tree constant = PRE_EXPR_CONSTANT (eprime);
3307 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3309 tree builtexpr = fold_convert (type, constant);
3310 if (!is_gimple_min_invariant (builtexpr))
3312 tree forcedexpr = force_gimple_operand (builtexpr,
3313 &stmts, true,
3314 NULL);
3315 if (!is_gimple_min_invariant (forcedexpr))
3317 if (forcedexpr != builtexpr)
3319 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3320 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3322 if (stmts)
3324 gimple_stmt_iterator gsi;
3325 gsi = gsi_start (stmts);
3326 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3328 gimple stmt = gsi_stmt (gsi);
3329 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3330 gimple_set_plf (stmt, NECESSARY, false);
3332 gsi_insert_seq_on_edge (pred, stmts);
3334 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3339 else if (eprime->kind == NAME)
3341 /* We may have to do a conversion because our value
3342 numbering can look through types in certain cases, but
3343 our IL requires all operands of a phi node have the same
3344 type. */
3345 tree name = PRE_EXPR_NAME (eprime);
3346 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3348 tree builtexpr;
3349 tree forcedexpr;
3350 builtexpr = fold_convert (type, name);
3351 forcedexpr = force_gimple_operand (builtexpr,
3352 &stmts, true,
3353 NULL);
3355 if (forcedexpr != name)
3357 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3358 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3361 if (stmts)
3363 gimple_stmt_iterator gsi;
3364 gsi = gsi_start (stmts);
3365 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3367 gimple stmt = gsi_stmt (gsi);
3368 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3369 gimple_set_plf (stmt, NECESSARY, false);
3371 gsi_insert_seq_on_edge (pred, stmts);
3373 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3377 /* If we didn't want a phi node, and we made insertions, we still have
3378 inserted new stuff, and thus return true. If we didn't want a phi node,
3379 and didn't make insertions, we haven't added anything new, so return
3380 false. */
3381 if (nophi && insertions)
3382 return true;
3383 else if (nophi && !insertions)
3384 return false;
3386 /* Now build a phi for the new variable. */
3387 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3389 prephitemp = create_tmp_var (type, "prephitmp");
3390 get_var_ann (prephitemp);
3393 temp = prephitemp;
3394 add_referenced_var (temp);
3396 if (TREE_CODE (type) == COMPLEX_TYPE
3397 || TREE_CODE (type) == VECTOR_TYPE)
3398 DECL_GIMPLE_REG_P (temp) = 1;
3399 phi = create_phi_node (temp, block);
3401 gimple_set_plf (phi, NECESSARY, false);
3402 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3403 VN_INFO (gimple_phi_result (phi))->value_id = val;
3404 VEC_safe_push (gimple, heap, inserted_exprs, phi);
3405 bitmap_set_bit (inserted_phi_names,
3406 SSA_NAME_VERSION (gimple_phi_result (phi)));
3407 FOR_EACH_EDGE (pred, ei, block->preds)
3409 pre_expr ae = avail[pred->src->index];
3410 gcc_assert (get_expr_type (ae) == type
3411 || useless_type_conversion_p (type, get_expr_type (ae)));
3412 if (ae->kind == CONSTANT)
3413 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3414 else
3415 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3416 UNKNOWN_LOCATION);
3419 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3420 add_to_value (val, newphi);
3422 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3423 this insertion, since we test for the existence of this value in PHI_GEN
3424 before proceeding with the partial redundancy checks in insert_aux.
3426 The value may exist in AVAIL_OUT, in particular, it could be represented
3427 by the expression we are trying to eliminate, in which case we want the
3428 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3429 inserted there.
3431 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3432 this block, because if it did, it would have existed in our dominator's
3433 AVAIL_OUT, and would have been skipped due to the full redundancy check. */
3436 bitmap_insert_into_set (PHI_GEN (block), newphi);
3437 bitmap_value_replace_in_set (AVAIL_OUT (block),
3438 newphi);
3439 bitmap_insert_into_set (NEW_SETS (block),
3440 newphi);
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3444 fprintf (dump_file, "Created phi ");
3445 print_gimple_stmt (dump_file, phi, 0, 0);
3446 fprintf (dump_file, " in block %d\n", block->index);
3448 pre_stats.phis++;
3449 return true;
3454 /* Perform insertion of partially redundant values.
3455 For BLOCK, do the following:
3456 1. Propagate the NEW_SETS of the dominator into the current block.
3457 If the block has multiple predecessors,
3458 2a. Iterate over the ANTIC expressions for the block to see if
3459 any of them are partially redundant.
3460 2b. If so, insert them into the necessary predecessors to make
3461 the expression fully redundant.
3462 2c. Insert a new PHI merging the values of the predecessors.
3463 2d. Insert the new PHI, and the new expressions, into the
3464 NEW_SETS set.
3465 3. Recursively call ourselves on the dominator children of BLOCK.
3467 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3468 do_regular_insertion and do_partial_partial_insertion. */
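/* In outline, a simplified sketch of the recursion below:

   insert_aux (block)
     copy NEW_SETS of the immediate dominator into this block's
       NEW_SETS and AVAIL_OUT
     if block has multiple predecessors
       do_regular_insertion (block, dom)
       do_partial_partial_insertion (block, dom), when partial-partial
         PRE is enabled
     recurse on the dominator children of block  */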
3472 static bool
3473 do_regular_insertion (basic_block block, basic_block dom)
3475 bool new_stuff = false;
3476 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3477 pre_expr expr;
3478 int i;
3480 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3482 if (expr->kind != NAME)
3484 pre_expr *avail;
3485 unsigned int val;
3486 bool by_some = false;
3487 bool cant_insert = false;
3488 bool all_same = true;
3489 pre_expr first_s = NULL;
3490 edge pred;
3491 basic_block bprime;
3492 pre_expr eprime = NULL;
3493 edge_iterator ei;
3494 pre_expr edoubleprime = NULL;
3495 bool do_insertion = false;
3497 val = get_expr_value_id (expr);
3498 if (bitmap_set_contains_value (PHI_GEN (block), val))
3499 continue;
3500 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3502 if (dump_file && (dump_flags & TDF_DETAILS))
3503 fprintf (dump_file, "Found fully redundant value\n");
3504 continue;
3507 avail = XCNEWVEC (pre_expr, last_basic_block);
3508 FOR_EACH_EDGE (pred, ei, block->preds)
3510 unsigned int vprime;
3512 /* We should never run insertion for the exit block
3513 and so not come across fake pred edges. */
3514 gcc_assert (!(pred->flags & EDGE_FAKE));
3515 bprime = pred->src;
3516 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3517 bprime, block);
3519 /* eprime will generally only be NULL if the
3520 value of the expression, translated
3521 through the PHI for this predecessor, is
3522 undefined. If that is the case, we can't
3523 make the expression fully redundant,
3524 because its value is undefined along a
3525 predecessor path. We can thus break out
3526 early because it doesn't matter what the
3527 rest of the results are. */
3528 if (eprime == NULL)
3530 cant_insert = true;
3531 break;
3534 eprime = fully_constant_expression (eprime);
3535 vprime = get_expr_value_id (eprime);
3536 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3537 vprime, NULL);
3538 if (edoubleprime == NULL)
3540 avail[bprime->index] = eprime;
3541 all_same = false;
3543 else
3545 avail[bprime->index] = edoubleprime;
3546 by_some = true;
3547 /* We want to perform insertions to remove a redundancy on
3548 a path in the CFG we want to optimize for speed. */
3549 if (optimize_edge_for_speed_p (pred))
3550 do_insertion = true;
3551 if (first_s == NULL)
3552 first_s = edoubleprime;
3553 else if (!pre_expr_eq (first_s, edoubleprime))
3554 all_same = false;
3557 /* If we can insert it, it's not the same value
3558 already existing along every predecessor, and
3559 it's defined by some predecessor, it is
3560 partially redundant. */
3561 if (!cant_insert && !all_same && by_some && do_insertion
3562 && dbg_cnt (treepre_insert))
3564 if (insert_into_preds_of_block (block, get_expression_id (expr),
3565 avail))
3566 new_stuff = true;
3568 /* If all edges produce the same value and that value is
3569 an invariant, then the PHI has the same value on all
3570 edges. Note this. */
3571 else if (!cant_insert && all_same && eprime
3572 && (edoubleprime->kind == CONSTANT
3573 || edoubleprime->kind == NAME)
3574 && !value_id_constant_p (val))
3576 unsigned int j;
3577 bitmap_iterator bi;
3578 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3579 value_expressions, val);
3581 unsigned int new_val = get_expr_value_id (edoubleprime);
3582 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3584 pre_expr expr = expression_for_id (j);
3586 if (expr->kind == NAME)
3588 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3589 /* Just reset the value id and valnum so it is
3590 the same as the constant we have discovered. */
3591 if (edoubleprime->kind == CONSTANT)
3593 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3594 pre_stats.constified++;
3596 else
3597 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3598 info->value_id = new_val;
3602 free (avail);
3606 VEC_free (pre_expr, heap, exprs);
3607 return new_stuff;
3611 /* Perform insertion for partially anticipatable expressions. There
3612 is only one case in which we will perform insertion for these: when
3613 the expression is partially anticipatable and fully available.
3614 In this case, we know that putting it earlier will enable us to
3615 remove the later computation. */
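/* Purely as an illustration: a value computed before a loop and
   recomputed only under a condition inside it is partially
   anticipatable at the loop head yet fully available there, so
   inserting it at the head makes the in-loop computation fully
   redundant and removable.  */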
3618 static bool
3619 do_partial_partial_insertion (basic_block block, basic_block dom)
3621 bool new_stuff = false;
3622 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3623 pre_expr expr;
3624 int i;
3626 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3628 if (expr->kind != NAME)
3630 pre_expr *avail;
3631 unsigned int val;
3632 bool by_all = true;
3633 bool cant_insert = false;
3634 edge pred;
3635 basic_block bprime;
3636 pre_expr eprime = NULL;
3637 edge_iterator ei;
3639 val = get_expr_value_id (expr);
3640 if (bitmap_set_contains_value (PHI_GEN (block), val))
3641 continue;
3642 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3643 continue;
3645 avail = XCNEWVEC (pre_expr, last_basic_block);
3646 FOR_EACH_EDGE (pred, ei, block->preds)
3648 unsigned int vprime;
3649 pre_expr edoubleprime;
3651 /* We should never run insertion for the exit block
3652 and so not come across fake pred edges. */
3653 gcc_assert (!(pred->flags & EDGE_FAKE));
3654 bprime = pred->src;
3655 eprime = phi_translate (expr, ANTIC_IN (block),
3656 PA_IN (block),
3657 bprime, block);
3659 /* eprime will generally only be NULL if the
3660 value of the expression, translated
3661 through the PHI for this predecessor, is
3662 undefined. If that is the case, we can't
3663 make the expression fully redundant,
3664 because its value is undefined along a
3665 predecessor path. We can thus break out
3666 early because it doesn't matter what the
3667 rest of the results are. */
3668 if (eprime == NULL)
3670 cant_insert = true;
3671 break;
3674 eprime = fully_constant_expression (eprime);
3675 vprime = get_expr_value_id (eprime);
3676 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3677 vprime, NULL);
3678 if (edoubleprime == NULL)
3680 by_all = false;
3681 break;
3683 else
3684 avail[bprime->index] = edoubleprime;
3688 /* If we can insert it and the value is available
3689 along every predecessor, the expression is partially
3690 anticipatable but fully available, so inserting it
3691 will make the later computation redundant. */
3692 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3694 pre_stats.pa_insert++;
3695 if (insert_into_preds_of_block (block, get_expression_id (expr),
3696 avail))
3697 new_stuff = true;
3699 free (avail);
3703 VEC_free (pre_expr, heap, exprs);
3704 return new_stuff;
3707 static bool
3708 insert_aux (basic_block block)
3710 basic_block son;
3711 bool new_stuff = false;
3713 if (block)
3715 basic_block dom;
3716 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3717 if (dom)
3719 unsigned i;
3720 bitmap_iterator bi;
3721 bitmap_set_t newset = NEW_SETS (dom);
3722 if (newset)
3724 /* Note that we need to value_replace both NEW_SETS and
3725 AVAIL_OUT. In both sets, the value may already be
3726 represented by some non-simple expression that we want
3727 to replace with the dominator's expression here. */
3728 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3730 pre_expr expr = expression_for_id (i);
3731 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3732 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3735 if (!single_pred_p (block))
3737 new_stuff |= do_regular_insertion (block, dom);
3738 if (do_partial_partial)
3739 new_stuff |= do_partial_partial_insertion (block, dom);
3743 for (son = first_dom_son (CDI_DOMINATORS, block);
3744 son;
3745 son = next_dom_son (CDI_DOMINATORS, son))
3747 new_stuff |= insert_aux (son);
3750 return new_stuff;
3753 /* Perform insertion of partially redundant values. */
3755 static void
3756 insert (void)
3758 bool new_stuff = true;
3759 basic_block bb;
3760 int num_iterations = 0;
3762 FOR_ALL_BB (bb)
3763 NEW_SETS (bb) = bitmap_set_new ();
3765 while (new_stuff)
3767 num_iterations++;
3768 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3770 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3774 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */
3776 static void
3777 add_to_exp_gen (basic_block block, tree op)
3779 if (!in_fre)
3781 pre_expr result;
3782 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3783 return;
3784 result = get_or_alloc_expr_for_name (op);
3785 bitmap_value_insert_into_set (EXP_GEN (block), result);
3789 /* Create value ids for PHI in BLOCK. */
3791 static void
3792 make_values_for_phi (gimple phi, basic_block block)
3794 tree result = gimple_phi_result (phi);
3796 /* We have no need for virtual phis, as they don't represent
3797 actual computations. */
3798 if (is_gimple_reg (result))
3800 pre_expr e = get_or_alloc_expr_for_name (result);
3801 add_to_value (get_expr_value_id (e), e);
3802 bitmap_insert_into_set (PHI_GEN (block), e);
3803 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3804 if (!in_fre)
3806 unsigned i;
3807 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3809 tree arg = gimple_phi_arg_def (phi, i);
3810 if (TREE_CODE (arg) == SSA_NAME)
3812 e = get_or_alloc_expr_for_name (arg);
3813 add_to_value (get_expr_value_id (e), e);
3820 /* Compute the AVAIL set for all basic blocks.
3822 This function performs value numbering of the statements in each basic
3823 block. The AVAIL sets are built from information we glean while doing
3824 this value numbering, since the AVAIL sets contain only one entry per
3825 value.
3827 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3828 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
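/* For example (illustrative block names), if B2's immediate dominator
   is B1 then

   AVAIL_OUT[B2] = AVAIL_OUT[B1] U PHI_GEN[B2] U TMP_GEN[B2]

   which is why the walk below just copies the dominator's AVAIL_OUT and
   adds the block's own PHI results and definitions; no fixpoint
   iteration is needed.  */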
3830 static void
3831 compute_avail (void)
3834 basic_block block, son;
3835 basic_block *worklist;
3836 size_t sp = 0;
3837 unsigned i;
3839 /* We pretend that default definitions are defined in the entry block.
3840 This includes function arguments and the static chain decl. */
3841 for (i = 1; i < num_ssa_names; ++i)
3843 tree name = ssa_name (i);
3844 pre_expr e;
3845 if (!name
3846 || !SSA_NAME_IS_DEFAULT_DEF (name)
3847 || has_zero_uses (name)
3848 || !is_gimple_reg (name))
3849 continue;
3851 e = get_or_alloc_expr_for_name (name);
3852 add_to_value (get_expr_value_id (e), e);
3853 if (!in_fre)
3854 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3855 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3858 /* Allocate the worklist. */
3859 worklist = XNEWVEC (basic_block, n_basic_blocks);
3861 /* Seed the algorithm by putting the dominator children of the entry
3862 block on the worklist. */
3863 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3864 son;
3865 son = next_dom_son (CDI_DOMINATORS, son))
3866 worklist[sp++] = son;
3868 /* Loop until the worklist is empty. */
3869 while (sp)
3871 gimple_stmt_iterator gsi;
3872 gimple stmt;
3873 basic_block dom;
3874 unsigned int stmt_uid = 1;
3876 /* Pick a block from the worklist. */
3877 block = worklist[--sp];
3879 /* Initially, the set of available values in BLOCK is that of
3880 its immediate dominator. */
3881 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3882 if (dom)
3883 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3885 /* Generate values for PHI nodes. */
3886 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3887 make_values_for_phi (gsi_stmt (gsi), block);
3889 BB_MAY_NOTRETURN (block) = 0;
3891 /* Now compute value numbers and populate value sets with all
3892 the expressions computed in BLOCK. */
3893 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3895 ssa_op_iter iter;
3896 tree op;
3898 stmt = gsi_stmt (gsi);
3899 gimple_set_uid (stmt, stmt_uid++);
3901 /* Cache whether the basic-block has any non-visible side-effect
3902 or control flow.
3903 If this isn't a call or it is the last stmt in the
3904 basic-block then the CFG represents things correctly. */
3905 if (is_gimple_call (stmt)
3906 && !stmt_ends_bb_p (stmt))
3908 /* Non-looping const functions always return normally.
3909 Otherwise the call might not return or have side-effects
3910 that forbid hoisting possibly trapping expressions
3911 before it. */
3912 int flags = gimple_call_flags (stmt);
3913 if (!(flags & ECF_CONST)
3914 || (flags & ECF_LOOPING_CONST_OR_PURE))
3915 BB_MAY_NOTRETURN (block) = 1;
3918 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3920 pre_expr e = get_or_alloc_expr_for_name (op);
3922 add_to_value (get_expr_value_id (e), e);
3923 if (!in_fre)
3924 bitmap_insert_into_set (TMP_GEN (block), e);
3925 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3928 if (gimple_has_volatile_ops (stmt)
3929 || stmt_could_throw_p (stmt))
3930 continue;
3932 switch (gimple_code (stmt))
3934 case GIMPLE_RETURN:
3935 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3936 add_to_exp_gen (block, op);
3937 continue;
3939 case GIMPLE_CALL:
3941 vn_reference_t ref;
3942 unsigned int i;
3943 vn_reference_op_t vro;
3944 pre_expr result = NULL;
3945 VEC(vn_reference_op_s, heap) *ops = NULL;
3947 if (!can_value_number_call (stmt))
3948 continue;
3950 copy_reference_ops_from_call (stmt, &ops);
3951 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3952 gimple_expr_type (stmt),
3953 ops, &ref, false);
3954 VEC_free (vn_reference_op_s, heap, ops);
3955 if (!ref)
3956 continue;
3958 for (i = 0; VEC_iterate (vn_reference_op_s,
3959 ref->operands, i,
3960 vro); i++)
3962 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3963 add_to_exp_gen (block, vro->op0);
3964 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3965 add_to_exp_gen (block, vro->op1);
3966 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3967 add_to_exp_gen (block, vro->op2);
3969 result = (pre_expr) pool_alloc (pre_expr_pool);
3970 result->kind = REFERENCE;
3971 result->id = 0;
3972 PRE_EXPR_REFERENCE (result) = ref;
3974 get_or_alloc_expression_id (result);
3975 add_to_value (get_expr_value_id (result), result);
3976 if (!in_fre)
3977 bitmap_value_insert_into_set (EXP_GEN (block), result);
3978 continue;
3981 case GIMPLE_ASSIGN:
3983 pre_expr result = NULL;
3984 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3986 case tcc_unary:
3987 case tcc_binary:
3988 case tcc_comparison:
3990 vn_nary_op_t nary;
3991 unsigned int i;
3993 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
3994 gimple_assign_rhs_code (stmt),
3995 gimple_expr_type (stmt),
3996 gimple_assign_rhs1 (stmt),
3997 gimple_assign_rhs2 (stmt),
3998 NULL_TREE, NULL_TREE, &nary);
4000 if (!nary)
4001 continue;
4003 for (i = 0; i < nary->length; i++)
4004 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4005 add_to_exp_gen (block, nary->op[i]);
4007 result = (pre_expr) pool_alloc (pre_expr_pool);
4008 result->kind = NARY;
4009 result->id = 0;
4010 PRE_EXPR_NARY (result) = nary;
4011 break;
4014 case tcc_declaration:
4015 case tcc_reference:
4017 vn_reference_t ref;
4018 unsigned int i;
4019 vn_reference_op_t vro;
4021 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4022 gimple_vuse (stmt),
4023 true, &ref);
4024 if (!ref)
4025 continue;
4027 for (i = 0; VEC_iterate (vn_reference_op_s,
4028 ref->operands, i,
4029 vro); i++)
4031 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4032 add_to_exp_gen (block, vro->op0);
4033 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4034 add_to_exp_gen (block, vro->op1);
4035 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4036 add_to_exp_gen (block, vro->op2);
4038 result = (pre_expr) pool_alloc (pre_expr_pool);
4039 result->kind = REFERENCE;
4040 result->id = 0;
4041 PRE_EXPR_REFERENCE (result) = ref;
4042 break;
4045 default:
4046 /* For any other statement that we don't
4047 recognize, simply add all referenced
4048 SSA_NAMEs to EXP_GEN. */
4049 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4050 add_to_exp_gen (block, op);
4051 continue;
4054 get_or_alloc_expression_id (result);
4055 add_to_value (get_expr_value_id (result), result);
4056 if (!in_fre)
4057 bitmap_value_insert_into_set (EXP_GEN (block), result);
4059 continue;
4061 default:
4062 break;
4066 /* Put the dominator children of BLOCK on the worklist of blocks
4067 to compute available sets for. */
4068 for (son = first_dom_son (CDI_DOMINATORS, block);
4069 son;
4070 son = next_dom_son (CDI_DOMINATORS, son))
4071 worklist[sp++] = son;
4074 free (worklist);
4077 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4078 than the available expressions for it. The insertion point is
4079 right before the first use in STMT. Returns the SSA_NAME that should
4080 be used for replacement. */
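/* A sketch of the intended use (all names here are hypothetical):
   suppose SCCVN determined VN_INFO (x_7)->valnum == t_9, where t_9
   needs_insertion and vn_get_expr_for (t_9) is the simpler expression
   (int) y_2.  Then this function materialises a new SSA name, say

     pretmp_10 = (int) y_2;

   immediately before STMT and returns it, and eliminate () uses that
   name as the replacement.  */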
4082 static tree
4083 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4085 basic_block bb = gimple_bb (stmt);
4086 gimple_stmt_iterator gsi;
4087 gimple_seq stmts = NULL;
4088 tree expr;
4089 pre_expr e;
4091 /* First create a value expression from the expression we want
4092 to insert and associate it with the value handle for SSA_VN. */
4093 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4094 if (e == NULL)
4095 return NULL_TREE;
4097 /* Then use create_expression_by_pieces to generate a valid
4098 expression to insert at this point of the IL stream. */
4099 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4100 if (expr == NULL_TREE)
4101 return NULL_TREE;
4102 gsi = gsi_for_stmt (stmt);
4103 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4105 return expr;
4108 /* Eliminate fully redundant computations. */
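/* A sketch of a full redundancy being eliminated (hypothetical GIMPLE,
   not taken from a testcase):

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;     <- same value number as x_1
     foo (y_4);

   bitmap_find_leader finds x_1 as the leader of y_4's value in
   AVAIL_OUT, so the second statement is rewritten into the copy
   y_4 = x_1, which later copy propagation and DCE can clean up.  */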
4110 static unsigned int
4111 eliminate (void)
4113 VEC (gimple, heap) *to_remove = NULL;
4114 basic_block b;
4115 unsigned int todo = 0;
4116 gimple_stmt_iterator gsi;
4117 gimple stmt;
4118 unsigned i;
4120 FOR_EACH_BB (b)
4122 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4124 stmt = gsi_stmt (gsi);
4126 /* Lookup the RHS of the expression, see if we have an
4127 available computation for it. If so, replace the RHS with
4128 the available computation. */
4129 if (gimple_has_lhs (stmt)
4130 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4131 && !gimple_assign_ssa_name_copy_p (stmt)
4132 && (!gimple_assign_single_p (stmt)
4133 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4134 && !gimple_has_volatile_ops (stmt)
4135 && !has_zero_uses (gimple_get_lhs (stmt)))
4137 tree lhs = gimple_get_lhs (stmt);
4138 tree rhs = NULL_TREE;
4139 tree sprime = NULL;
4140 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4141 pre_expr sprimeexpr;
4143 if (gimple_assign_single_p (stmt))
4144 rhs = gimple_assign_rhs1 (stmt);
4146 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4147 get_expr_value_id (lhsexpr),
4148 NULL);
4150 if (sprimeexpr)
4152 if (sprimeexpr->kind == CONSTANT)
4153 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4154 else if (sprimeexpr->kind == NAME)
4155 sprime = PRE_EXPR_NAME (sprimeexpr);
4156 else
4157 gcc_unreachable ();
4160 /* If there is no existing leader but SCCVN knows this
4161 value is constant, use that constant. */
4162 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4164 sprime = VN_INFO (lhs)->valnum;
4165 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4166 TREE_TYPE (sprime)))
4167 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4169 if (dump_file && (dump_flags & TDF_DETAILS))
4171 fprintf (dump_file, "Replaced ");
4172 print_gimple_expr (dump_file, stmt, 0, 0);
4173 fprintf (dump_file, " with ");
4174 print_generic_expr (dump_file, sprime, 0);
4175 fprintf (dump_file, " in ");
4176 print_gimple_stmt (dump_file, stmt, 0, 0);
4178 pre_stats.eliminations++;
4179 propagate_tree_value_into_stmt (&gsi, sprime);
4180 stmt = gsi_stmt (gsi);
4181 update_stmt (stmt);
4182 continue;
4185 /* If there is no existing usable leader but SCCVN thinks
4186 it has an expression it wants to use as replacement,
4187 insert that. */
4188 if (!sprime || sprime == lhs)
4190 tree val = VN_INFO (lhs)->valnum;
4191 if (val != VN_TOP
4192 && TREE_CODE (val) == SSA_NAME
4193 && VN_INFO (val)->needs_insertion
4194 && can_PRE_operation (vn_get_expr_for (val)))
4195 sprime = do_SCCVN_insertion (stmt, val);
4197 if (sprime
4198 && sprime != lhs
4199 && (rhs == NULL_TREE
4200 || TREE_CODE (rhs) != SSA_NAME
4201 || may_propagate_copy (rhs, sprime)))
4203 gcc_assert (sprime != rhs);
4205 if (dump_file && (dump_flags & TDF_DETAILS))
4207 fprintf (dump_file, "Replaced ");
4208 print_gimple_expr (dump_file, stmt, 0, 0);
4209 fprintf (dump_file, " with ");
4210 print_generic_expr (dump_file, sprime, 0);
4211 fprintf (dump_file, " in ");
4212 print_gimple_stmt (dump_file, stmt, 0, 0);
4215 if (TREE_CODE (sprime) == SSA_NAME)
4216 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4217 NECESSARY, true);
4218 /* We need to make sure the new and old types actually match,
4219 which may require adding a simple cast; fold_convert
4220 will do that for us. */
4221 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4222 && !useless_type_conversion_p (gimple_expr_type (stmt),
4223 TREE_TYPE (sprime)))
4224 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4226 pre_stats.eliminations++;
4227 propagate_tree_value_into_stmt (&gsi, sprime);
4228 stmt = gsi_stmt (gsi);
4229 update_stmt (stmt);
4231 /* If we removed EH side effects from the statement, clean
4232 its EH information. */
4233 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4235 bitmap_set_bit (need_eh_cleanup,
4236 gimple_bb (stmt)->index);
4237 if (dump_file && (dump_flags & TDF_DETAILS))
4238 fprintf (dump_file, " Removed EH side effects.\n");
4242 /* If the statement is a scalar store, see if the expression
4243 has the same value number as its rhs. If so, the store is
4244 dead. */
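/* A hypothetical example of such a dead store:

     tmp_3 = *p_1;
     ...                  <- nothing clobbers *p_1 in between
     *p_1 = tmp_3;

   vn_reference_lookup of *p_1 at the store returns tmp_3's value, so
   the store just writes back what is already in memory and can be
   queued for removal below.  */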
4245 else if (gimple_assign_single_p (stmt)
4246 && !is_gimple_reg (gimple_assign_lhs (stmt))
4247 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4248 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4250 tree rhs = gimple_assign_rhs1 (stmt);
4251 tree val;
4252 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4253 gimple_vuse (stmt), true, NULL);
4254 if (TREE_CODE (rhs) == SSA_NAME)
4255 rhs = VN_INFO (rhs)->valnum;
4256 if (val
4257 && operand_equal_p (val, rhs, 0))
4259 if (dump_file && (dump_flags & TDF_DETAILS))
4261 fprintf (dump_file, "Deleted redundant store ");
4262 print_gimple_stmt (dump_file, stmt, 0, 0);
4265 /* Queue stmt for removal. */
4266 VEC_safe_push (gimple, heap, to_remove, stmt);
4269 /* Visit COND_EXPRs and fold the comparison with the
4270 available value-numbers. */
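/* For instance (hypothetical names): if SCCVN computed that i_2's
   value is the constant 0, then the comparison in

     if (i_2 != 0)

   folds to 0, gimple_cond_make_false rewrites the condition to
   'if (0)', and the requested CFG cleanup removes the dead arm.  */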
4271 else if (gimple_code (stmt) == GIMPLE_COND)
4273 tree op0 = gimple_cond_lhs (stmt);
4274 tree op1 = gimple_cond_rhs (stmt);
4275 tree result;
4277 if (TREE_CODE (op0) == SSA_NAME)
4278 op0 = VN_INFO (op0)->valnum;
4279 if (TREE_CODE (op1) == SSA_NAME)
4280 op1 = VN_INFO (op1)->valnum;
4281 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4282 op0, op1);
4283 if (result && TREE_CODE (result) == INTEGER_CST)
4285 if (integer_zerop (result))
4286 gimple_cond_make_false (stmt);
4287 else
4288 gimple_cond_make_true (stmt);
4289 update_stmt (stmt);
4290 todo = TODO_cleanup_cfg;
4293 /* Visit indirect calls and turn them into direct calls if
4294 possible. */
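/* A hypothetical example: if fn_5 is value-numbered to &foo, then

     fn_5 (x_1);

   becomes the direct call  foo (x_1);  by substituting the ADDR_EXPR
   as the call target.  */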
4295 if (gimple_code (stmt) == GIMPLE_CALL
4296 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4298 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4299 if (TREE_CODE (fn) == ADDR_EXPR
4300 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4302 if (dump_file && (dump_flags & TDF_DETAILS))
4304 fprintf (dump_file, "Replacing call target with ");
4305 print_generic_expr (dump_file, fn, 0);
4306 fprintf (dump_file, " in ");
4307 print_gimple_stmt (dump_file, stmt, 0, 0);
4310 gimple_call_set_fn (stmt, fn);
4311 update_stmt (stmt);
4312 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4314 bitmap_set_bit (need_eh_cleanup,
4315 gimple_bb (stmt)->index);
4316 if (dump_file && (dump_flags & TDF_DETAILS))
4317 fprintf (dump_file, " Removed EH side effects.\n");
4320 /* Changing an indirect call to a direct call may
4321 have exposed different semantics. This may
4322 require an SSA update. */
4323 todo |= TODO_update_ssa_only_virtuals;
4328 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4330 gimple stmt, phi = gsi_stmt (gsi);
4331 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4332 pre_expr sprimeexpr, resexpr;
4333 gimple_stmt_iterator gsi2;
4335 /* We want to perform redundant PHI elimination. Do so by
4336 replacing the PHI with a single copy if possible.
4337 Do not touch inserted, single-argument or virtual PHIs. */
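/* For example (hypothetical names): given

     res_5 = PHI <x_3 (3), x_3 (4)>

   where res_5 and x_3 share the same value number and x_3 is the
   leader in AVAIL_OUT, the PHI is removed and replaced by the copy
   res_5 = x_3 inserted right after the labels of this block; the
   copy itself is queued in to_remove for later cleanup.  */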
4338 if (gimple_phi_num_args (phi) == 1
4339 || !is_gimple_reg (res)
4340 || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
4342 gsi_next (&gsi);
4343 continue;
4346 resexpr = get_or_alloc_expr_for_name (res);
4347 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4348 get_expr_value_id (resexpr), NULL);
4349 if (sprimeexpr)
4351 if (sprimeexpr->kind == CONSTANT)
4352 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4353 else if (sprimeexpr->kind == NAME)
4354 sprime = PRE_EXPR_NAME (sprimeexpr);
4355 else
4356 gcc_unreachable ();
4358 if (!sprimeexpr
4359 || sprime == res)
4361 gsi_next (&gsi);
4362 continue;
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4367 fprintf (dump_file, "Replaced redundant PHI node defining ");
4368 print_generic_expr (dump_file, res, 0);
4369 fprintf (dump_file, " with ");
4370 print_generic_expr (dump_file, sprime, 0);
4371 fprintf (dump_file, "\n");
4374 remove_phi_node (&gsi, false);
4376 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4377 sprime = fold_convert (TREE_TYPE (res), sprime);
4378 stmt = gimple_build_assign (res, sprime);
4379 SSA_NAME_DEF_STMT (res) = stmt;
4380 if (TREE_CODE (sprime) == SSA_NAME)
4381 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4382 NECESSARY, true);
4383 gsi2 = gsi_after_labels (b);
4384 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4385 /* Queue the copy for eventual removal. */
4386 VEC_safe_push (gimple, heap, to_remove, stmt);
4387 pre_stats.eliminations++;
4391 /* We cannot remove stmts during the BB walk, and in particular cannot
4392 release SSA names there, as this confuses the VN machinery. The stmts ending
4393 up in to_remove are either stores or simple copies. */
4394 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4396 tree lhs = gimple_assign_lhs (stmt);
4397 tree rhs = gimple_assign_rhs1 (stmt);
4398 use_operand_p use_p;
4399 gimple use_stmt;
4401 /* If there is a single use only, propagate the equivalency
4402 instead of keeping the copy. */
4403 if (TREE_CODE (lhs) == SSA_NAME
4404 && TREE_CODE (rhs) == SSA_NAME
4405 && single_imm_use (lhs, &use_p, &use_stmt)
4406 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4408 SET_USE (use_p, gimple_assign_rhs1 (stmt));
4409 update_stmt (use_stmt);
4412 /* If this is a store or a now unused copy, remove it. */
4413 if (TREE_CODE (lhs) != SSA_NAME
4414 || has_zero_uses (lhs))
4416 gsi = gsi_for_stmt (stmt);
4417 unlink_stmt_vdef (stmt);
4418 gsi_remove (&gsi, true);
4419 release_defs (stmt);
4422 VEC_free (gimple, heap, to_remove);
4424 return todo;
4427 /* Borrow a bit of tree-ssa-dce.c for the moment.
4428 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4429 this may be a bit faster, and we may want critical edges kept split. */
4431 /* If OP's defining statement has not already been determined to be necessary,
4432 mark that statement necessary. Return the stmt, if it is newly
4433 necessary. */
4435 static inline gimple
4436 mark_operand_necessary (tree op)
4438 gimple stmt;
4440 gcc_assert (op);
4442 if (TREE_CODE (op) != SSA_NAME)
4443 return NULL;
4445 stmt = SSA_NAME_DEF_STMT (op);
4446 gcc_assert (stmt);
4448 if (gimple_plf (stmt, NECESSARY)
4449 || gimple_nop_p (stmt))
4450 return NULL;
4452 gimple_set_plf (stmt, NECESSARY, true);
4453 return stmt;
4456 /* Because we don't exactly follow the standard PRE algorithm, sometimes decide
4457 not to insert PHI nodes, and because value numbering of casts isn't
4458 perfect, we sometimes end up inserting dead code. This simple DCE-like
4459 pass removes any insertions we made that weren't actually used. */
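/* E.g. (hypothetical name): if insert () created pretmp_8 = a_1 + b_2
   on some edge but no eliminated statement ended up using pretmp_8,
   its definition is never marked NECESSARY and is removed again here.  */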
4461 static void
4462 remove_dead_inserted_code (void)
4464 VEC(gimple,heap) *worklist = NULL;
4465 int i;
4466 gimple t;
4468 worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
4469 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4471 if (gimple_plf (t, NECESSARY))
4472 VEC_quick_push (gimple, worklist, t);
4474 while (VEC_length (gimple, worklist) > 0)
4476 t = VEC_pop (gimple, worklist);
4478 /* PHI nodes are somewhat special in that each PHI alternative has
4479 data and control dependencies. All the statements feeding the
4480 PHI node's arguments are always necessary. */
4481 if (gimple_code (t) == GIMPLE_PHI)
4483 unsigned k;
4485 VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
4486 for (k = 0; k < gimple_phi_num_args (t); k++)
4488 tree arg = PHI_ARG_DEF (t, k);
4489 if (TREE_CODE (arg) == SSA_NAME)
4491 gimple n = mark_operand_necessary (arg);
4492 if (n)
4493 VEC_quick_push (gimple, worklist, n);
4497 else
4499 /* Propagate through the operands. Examine all the USE, VUSE and
4500 VDEF operands in this statement. Mark all the statements
4501 which feed this statement's uses as necessary. */
4502 ssa_op_iter iter;
4503 tree use;
4505 /* The operands of VDEF expressions are also needed as they
4506 represent potential definitions that may reach this
4507 statement (VDEF operands allow us to follow def-def
4508 links). */
4510 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4512 gimple n = mark_operand_necessary (use);
4513 if (n)
4514 VEC_safe_push (gimple, heap, worklist, n);
4519 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
4521 if (!gimple_plf (t, NECESSARY))
4523 gimple_stmt_iterator gsi;
4525 if (dump_file && (dump_flags & TDF_DETAILS))
4527 fprintf (dump_file, "Removing unnecessary insertion:");
4528 print_gimple_stmt (dump_file, t, 0, 0);
4531 gsi = gsi_for_stmt (t);
4532 if (gimple_code (t) == GIMPLE_PHI)
4533 remove_phi_node (&gsi, true);
4534 else
4536 gsi_remove (&gsi, true);
4537 release_defs (t);
4541 VEC_free (gimple, heap, worklist);
4544 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4545 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4546 the number of visited blocks. */
4548 static int
4549 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4551 edge_iterator *stack;
4552 int sp;
4553 int post_order_num = 0;
4554 sbitmap visited;
4555 int count;
4557 if (include_entry_exit)
4558 post_order[post_order_num++] = EXIT_BLOCK;
4560 /* Allocate stack for back-tracking up CFG. */
4561 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4562 sp = 0;
4564 /* Allocate bitmap to track nodes that have been visited. */
4565 visited = sbitmap_alloc (last_basic_block);
4567 /* None of the nodes in the CFG have been visited yet. */
4568 sbitmap_zero (visited);
4570 /* Push the last edge on to the stack. */
4571 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4573 while (sp)
4575 edge_iterator ei;
4576 basic_block src;
4577 basic_block dest;
4579 /* Look at the edge on the top of the stack. */
4580 ei = stack[sp - 1];
4581 src = ei_edge (ei)->src;
4582 dest = ei_edge (ei)->dest;
4584 /* Check if the edge source has been visited yet. */
4585 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4587 /* Mark that we have visited the source. */
4588 SET_BIT (visited, src->index);
4590 if (EDGE_COUNT (src->preds) > 0)
4591 /* Since the SRC node has been visited for the first
4592 time, check its predecessors. */
4593 stack[sp++] = ei_start (src->preds);
4594 else
4595 post_order[post_order_num++] = src->index;
4597 else
4599 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4600 post_order[post_order_num++] = dest->index;
4602 if (!ei_one_before_end_p (ei))
4603 ei_next (&stack[sp - 1]);
4604 else
4605 sp--;
4609 if (include_entry_exit)
4611 post_order[post_order_num++] = ENTRY_BLOCK;
4612 count = post_order_num;
4614 else
4615 count = post_order_num + 2;
4617 free (stack);
4618 sbitmap_free (visited);
4619 return post_order_num;
4623 /* Initialize data structures used by PRE. */
4625 static void
4626 init_pre (bool do_fre)
4628 basic_block bb;
4630 next_expression_id = 1;
4631 expressions = NULL;
4632 VEC_safe_push (pre_expr, heap, expressions, NULL);
4633 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4634 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4635 get_max_value_id() + 1);
4636 name_to_id = NULL;
4638 in_fre = do_fre;
4640 inserted_exprs = NULL;
4641 need_creation = NULL;
4642 pretemp = NULL_TREE;
4643 storetemp = NULL_TREE;
4644 prephitemp = NULL_TREE;
4646 connect_infinite_loops_to_exit ();
4647 memset (&pre_stats, 0, sizeof (pre_stats));
4650 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4651 my_rev_post_order_compute (postorder, false);
4653 FOR_ALL_BB (bb)
4654 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
4656 calculate_dominance_info (CDI_POST_DOMINATORS);
4657 calculate_dominance_info (CDI_DOMINATORS);
4659 bitmap_obstack_initialize (&grand_bitmap_obstack);
4660 inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
4661 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4662 expr_pred_trans_eq, free);
4663 expression_to_id = htab_create (num_ssa_names * 3,
4664 pre_expr_hash,
4665 pre_expr_eq, NULL);
4666 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4667 sizeof (struct bitmap_set), 30);
4668 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4669 sizeof (struct pre_expr_d), 30);
4670 FOR_ALL_BB (bb)
4672 EXP_GEN (bb) = bitmap_set_new ();
4673 PHI_GEN (bb) = bitmap_set_new ();
4674 TMP_GEN (bb) = bitmap_set_new ();
4675 AVAIL_OUT (bb) = bitmap_set_new ();
4678 need_eh_cleanup = BITMAP_ALLOC (NULL);
4682 /* Deallocate data structures used by PRE. */
4684 static void
4685 fini_pre (bool do_fre)
4687 basic_block bb;
4689 free (postorder);
4690 VEC_free (bitmap_set_t, heap, value_expressions);
4691 VEC_free (gimple, heap, inserted_exprs);
4692 VEC_free (gimple, heap, need_creation);
4693 bitmap_obstack_release (&grand_bitmap_obstack);
4694 free_alloc_pool (bitmap_set_pool);
4695 free_alloc_pool (pre_expr_pool);
4696 htab_delete (phi_translate_table);
4697 htab_delete (expression_to_id);
4698 VEC_free (unsigned, heap, name_to_id);
4700 FOR_ALL_BB (bb)
4702 free (bb->aux);
4703 bb->aux = NULL;
4706 free_dominance_info (CDI_POST_DOMINATORS);
4708 if (!bitmap_empty_p (need_eh_cleanup))
4710 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4711 cleanup_tree_cfg ();
4714 BITMAP_FREE (need_eh_cleanup);
4716 if (!do_fre)
4717 loop_optimizer_finalize ();
4720 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4721 only wants to do full redundancy elimination. */
4723 static unsigned int
4724 execute_pre (bool do_fre)
4726 unsigned int todo = 0;
4728 do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
4730 /* This has to happen before SCCVN runs because
4731 loop_optimizer_init may create new phis, etc. */
4732 if (!do_fre)
4733 loop_optimizer_init (LOOPS_NORMAL);
4735 if (!run_scc_vn (do_fre))
4737 if (!do_fre)
4739 remove_dead_inserted_code ();
4740 loop_optimizer_finalize ();
4743 return 0;
4745 init_pre (do_fre);
4746 scev_initialize ();
4749 /* Collect and value number expressions computed in each basic block. */
4750 compute_avail ();
4752 if (dump_file && (dump_flags & TDF_DETAILS))
4754 basic_block bb;
4756 FOR_ALL_BB (bb)
4758 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4759 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4760 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4761 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4765 /* Insert can get quite slow on an incredibly large number of basic
4766 blocks due to some quadratic behavior. Until this behavior is
4767 fixed, don't run it when we have an incredibly large number of
4768 bb's. If we aren't going to run insert, there is no point in
4769 computing ANTIC, either, even though it's plenty fast. */
4770 if (!do_fre && n_basic_blocks < 4000)
4772 compute_antic ();
4773 insert ();
4776 /* Remove all the redundant expressions. */
4777 todo |= eliminate ();
4779 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4780 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4781 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4782 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4783 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4785 /* Remove fake edges before committing our inserts. This makes
4786 sure we don't end up with extra critical edges that we would
4787 need to split. */
4788 remove_fake_exit_edges ();
4789 gsi_commit_edge_inserts ();
4791 clear_expression_ids ();
4792 free_scc_vn ();
4793 if (!do_fre)
4794 remove_dead_inserted_code ();
4796 scev_finalize ();
4797 fini_pre (do_fre);
4799 return todo;
4802 /* Gate and execute functions for PRE. */
4804 static unsigned int
4805 do_pre (void)
4807 return execute_pre (false);
4810 static bool
4811 gate_pre (void)
4813 return flag_tree_pre != 0;
4816 struct gimple_opt_pass pass_pre =
4819 GIMPLE_PASS,
4820 "pre", /* name */
4821 gate_pre, /* gate */
4822 do_pre, /* execute */
4823 NULL, /* sub */
4824 NULL, /* next */
4825 0, /* static_pass_number */
4826 TV_TREE_PRE, /* tv_id */
4827 PROP_no_crit_edges | PROP_cfg
4828 | PROP_ssa, /* properties_required */
4829 0, /* properties_provided */
4830 0, /* properties_destroyed */
4831 TODO_rebuild_alias, /* todo_flags_start */
4832 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4833 | TODO_verify_ssa /* todo_flags_finish */
4838 /* Gate and execute functions for FRE. */
4840 static unsigned int
4841 execute_fre (void)
4843 return execute_pre (true);
4846 static bool
4847 gate_fre (void)
4849 return flag_tree_fre != 0;
4852 struct gimple_opt_pass pass_fre =
4855 GIMPLE_PASS,
4856 "fre", /* name */
4857 gate_fre, /* gate */
4858 execute_fre, /* execute */
4859 NULL, /* sub */
4860 NULL, /* next */
4861 0, /* static_pass_number */
4862 TV_TREE_FRE, /* tv_id */
4863 PROP_cfg | PROP_ssa, /* properties_required */
4864 0, /* properties_provided */
4865 0, /* properties_destroyed */
4866 0, /* todo_flags_start */
4867 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */