/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to insert
   the value of that expression in that block, we could do so.
   Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
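/* An illustrative example (names and block shapes are hypothetical,
   made up for exposition only):

       if (c_1)
         t_2 = a_3 + b_4;
       ...
       x_5 = a_3 + b_4;

   a_3 + b_4 is available only on the path through the then-block, but
   it is anticipatable in the block computing x_5, so it is partially
   redundant there.  Insertion places t_6 = a_3 + b_4 on the other
   incoming edge and merges the two with t_7 = PHI <t_2, t_6>;
   elimination then replaces the computation of x_5 with t_7, making
   the original computation fully redundant.  */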
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
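/* For instance (purely illustrative): if SCCVN determines that a_1
   and b_2 compute the same value, both names share one value number,
   and that value number is assigned a small integer value_id, say 7.
   Dataflow sets can then record bit 7 in a bitmap instead of keying
   off SSA_NAME_VERSION, and no fake SSA_NAME is created unless a
   representative is actually needed.  */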
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce one on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
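/* As a sketch (the ids here are made up): if expressions e1 and e2
   both have value_id 5, a set containing them stores
   expressions = {id(e1), id(e2)} and values = {5}.  Value-based
   queries such as bitmap_set_contains_value test the values bitmap,
   while iteration and leader lookup walk the expressions bitmap.  */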
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;
typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d T;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const struct pre_expr_d *e1, const struct pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const struct pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static hash_table <pre_expr_d> expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      unsigned old_len = VEC_length (unsigned, name_to_id);
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d T;
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
                          const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table <expr_pred_trans_d> phi_translate_table;

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e), pred->index);
  slot = phi_translate_table.find_slot_with_hash (&ept, ept.hashcode,
                                                  NO_INSERT);
  if (!slot)
    return NULL;
  else
    return (*slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e),
                                                 pred->index);

  slot = phi_translate_table.find_slot_with_hash (new_pair,
                                                  new_pair->hashcode, INSERT);
  free (*slot);
  *slot = new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  if (!in_fre)
    {
      print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
      print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
      print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
      print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
      if (do_partial_partial)
        print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
      print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
    }
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of
         them have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          unsigned int cnt;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}
/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
1435 static pre_expr
1436 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1437 basic_block pred, basic_block phiblock)
1439 switch (expr->kind)
1441 case NARY:
1443 unsigned int i;
1444 bool changed = false;
1445 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1446 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1447 sizeof_vn_nary_op (nary->length));
1448 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1450 for (i = 0; i < newnary->length; i++)
1452 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1453 continue;
1454 else
1456 pre_expr leader, result;
1457 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1458 leader = find_leader_in_sets (op_val_id, set1, set2);
1459 result = phi_translate (leader, set1, set2, pred, phiblock);
1460 if (result && result != leader)
1462 tree name = get_representative_for (result);
1463 if (!name)
1464 return NULL;
1465 newnary->op[i] = name;
1467 else if (!result)
1468 return NULL;
1470 changed |= newnary->op[i] != nary->op[i];
1473 if (changed)
1475 pre_expr constant;
1476 unsigned int new_val_id;
1478 tree result = vn_nary_op_lookup_pieces (newnary->length,
1479 newnary->opcode,
1480 newnary->type,
1481 &newnary->op[0],
1482 &nary);
1483 if (result && is_gimple_min_invariant (result))
1484 return get_or_alloc_expr_for_constant (result);
1486 expr = (pre_expr) pool_alloc (pre_expr_pool);
1487 expr->kind = NARY;
1488 expr->id = 0;
1489 if (nary)
1491 PRE_EXPR_NARY (expr) = nary;
1492 constant = fully_constant_expression (expr);
1493 if (constant != expr)
1494 return constant;
1496 new_val_id = nary->value_id;
1497 get_or_alloc_expression_id (expr);
1499 else
1501 new_val_id = get_next_value_id ();
1502 VEC_safe_grow_cleared (bitmap_set_t, heap,
1503 value_expressions,
1504 get_max_value_id() + 1);
1505 nary = vn_nary_op_insert_pieces (newnary->length,
1506 newnary->opcode,
1507 newnary->type,
1508 &newnary->op[0],
1509 result, new_val_id);
1510 PRE_EXPR_NARY (expr) = nary;
1511 constant = fully_constant_expression (expr);
1512 if (constant != expr)
1513 return constant;
1514 get_or_alloc_expression_id (expr);
1516 add_to_value (new_val_id, expr);
1518 return expr;
1520 break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                /* Make sure we do not recursively translate ourselves,
                   e.g. when translating a[n_1] where the leader for
                   n_1 is a[n_1] itself.  */
                if (get_expression_id (leader) != get_expression_id (expr))
                  {
                    opresult = phi_translate (leader, set1, set2,
                                              pred, phiblock);
                    if (!opresult)
                      break;
                    if (opresult != leader)
                      {
                        tree name = get_representative_for (opresult);
                        if (!name)
                          break;
                        changed |= name != op[n];
                        op[n] = name;
                      }
                  }
              }
            if (n != 3)
              {
                if (newoperands)
                  VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op[0]) == INTEGER_CST
                && TREE_CODE (op[1]) == INTEGER_CST
                && TREE_CODE (op[2]) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op[0]);
                off = double_int_add (off,
                                      double_int_neg
                                        (tree_to_double_int (op[1])));
                off = double_int_mul (off, tree_to_double_int (op[2]));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1).opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, VN_WALK);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            /* We can always insert constants, so if we have a partially
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;
    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Nor add
     translations of NAMEs, as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
                                &set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  /* PRE insertions are at the end of the basic-block
                     and have UID 0.  */
                  && (gimple_uid (def_stmt) == 0
                      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
            || (set2 && bitmap_set_contains_value (set2, value_id))))
        return false;
    }
  return true;
}
1999 /* Determine if the expression EXPR is valid in SET1 U SET2.
2000 ONLY SET2 CAN BE NULL.
2001 This means that we have a leader for each part of the expression
2002 (if it consists of values), or the expression is an SSA_NAME.
2003 For loads/calls, we also see if the vuse is killed in this block. */
2005 static bool
2006 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2007 basic_block block)
2009 switch (expr->kind)
2011 case NAME:
2012 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2013 case NARY:
2015 unsigned int i;
2016 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2017 for (i = 0; i < nary->length; i++)
2018 if (!op_valid_in_sets (set1, set2, nary->op[i]))
2019 return false;
2020 return true;
2022 break;
2023 case REFERENCE:
2025 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2026 vn_reference_op_t vro;
2027 unsigned int i;
2029 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
2031 if (!op_valid_in_sets (set1, set2, vro->op0)
2032 || !op_valid_in_sets (set1, set2, vro->op1)
2033 || !op_valid_in_sets (set1, set2, vro->op2))
2034 return false;
2036 return true;
2038 default:
2039 gcc_unreachable ();
2043 /* Clean the set of expressions that are no longer valid in SET1 or
2044 SET2. This means expressions that are made up of values we have no
2045 leaders for in SET1 or SET2. This version is used for partial
2046 anticipation, where an expression is removed when it is valid in
2047 neither ANTIC_IN nor PA_IN. */
2049 static void
2050 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2052 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2053 pre_expr expr;
2054 int i;
2056 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2058 if (!valid_in_sets (set1, set2, expr, block))
2059 bitmap_remove_from_set (set1, expr);
2061 VEC_free (pre_expr, heap, exprs);
2064 /* Clean the set of expressions that are no longer valid in SET. This
2065 means expressions that are made up of values we have no leaders for
2066 in SET. */
2068 static void
2069 clean (bitmap_set_t set, basic_block block)
2071 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2072 pre_expr expr;
2073 int i;
2075 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2077 if (!valid_in_sets (set, NULL, expr, block))
2078 bitmap_remove_from_set (set, expr);
2080 VEC_free (pre_expr, heap, exprs);
2083 /* Clean the set of expressions that are no longer valid in SET because
2084 they are clobbered in BLOCK or because they trap and may not be executed. */
2086 static void
2087 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2089 bitmap_iterator bi;
2090 unsigned i;
2092 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2094 pre_expr expr = expression_for_id (i);
2095 if (expr->kind == REFERENCE)
2097 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2098 if (ref->vuse)
2100 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2101 if (!gimple_nop_p (def_stmt)
2102 && ((gimple_bb (def_stmt) != block
2103 && !dominated_by_p (CDI_DOMINATORS,
2104 block, gimple_bb (def_stmt)))
2105 || (gimple_bb (def_stmt) == block
2106 && value_dies_in_block_x (expr, block))))
2107 bitmap_remove_from_set (set, expr);
2110 else if (expr->kind == NARY)
2112 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2113 /* If the NARY may trap make sure the block does not contain
2114 a possible exit point.
2115 ??? This is overly conservative if we translate AVAIL_OUT
2116 as the available expression might be after the exit point. */
2117 if (BB_MAY_NOTRETURN (block)
2118 && vn_nary_may_trap (nary))
2119 bitmap_remove_from_set (set, expr);
2124 static sbitmap has_abnormal_preds;
2126 /* List of blocks that may have changed during ANTIC computation and
2127 thus need to be iterated over. */
2129 static sbitmap changed_blocks;
2131 /* Decide whether to defer a block for a later iteration, or PHI
2132 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2133 should defer the block, and true if we processed it. */
2135 static bool
2136 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2137 basic_block block, basic_block phiblock)
2139 if (!BB_VISITED (phiblock))
2141 SET_BIT (changed_blocks, block->index);
2142 BB_VISITED (block) = 0;
2143 BB_DEFERRED (block) = 1;
2144 return false;
2146 else
2147 phi_translate_set (dest, source, block, phiblock);
2148 return true;
2151 /* Compute the ANTIC set for BLOCK.
2153 If succs(BLOCK) > 1 then
2154 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2155 else if succs(BLOCK) == 1 then
2156 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2158 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
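/* Worked example (hypothetical CFG): if BLOCK has successors S1 and
   S2 with ANTIC_IN[S1] = {a + b, c} and ANTIC_IN[S2] = {a + b}, then
   ANTIC_OUT[BLOCK] = {a + b}.  If BLOCK itself computes c, EXP_GEN
   contributes it, so ANTIC_IN[BLOCK] = clean ({a + b, c}) provided
   TMP_GEN[BLOCK] does not define any of their operands.  */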
2161 static bool
2162 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2164 bool changed = false;
2165 bitmap_set_t S, old, ANTIC_OUT;
2166 bitmap_iterator bi;
2167 unsigned int bii;
2168 edge e;
2169 edge_iterator ei;
2171 old = ANTIC_OUT = S = NULL;
2172 BB_VISITED (block) = 1;
2174 /* If any edges from predecessors are abnormal, antic_in is empty,
2175 so do nothing. */
2176 if (block_has_abnormal_pred_edge)
2177 goto maybe_dump_sets;
2179 old = ANTIC_IN (block);
2180 ANTIC_OUT = bitmap_set_new ();
2182 /* If the block has no successors, ANTIC_OUT is empty. */
2183 if (EDGE_COUNT (block->succs) == 0)
2185 /* If we have one successor, we could have some phi nodes to
2186 translate through. */
2187 else if (single_succ_p (block))
2189 basic_block succ_bb = single_succ (block);
2191 /* We trade iterations of the dataflow equations for having to
2192 phi translate the maximal set, which is incredibly slow
2193 (since the maximal set often has 300+ members, even when you
2194 have a small number of blocks).
2195 Basically, we defer the computation of ANTIC for this block
2196 until we have processed its successor, which will inevitably
2197 have a *much* smaller set of values to phi translate once
2198 clean has been run on it.
2199 The cost of doing this is that we technically perform more
2200 iterations; however, they are lower-cost iterations.
2202 Timings for PRE on tramp3d-v4:
2203 without maximal set fix: 11 seconds
2204 with maximal set fix/without deferring: 26 seconds
2205 with maximal set fix/with deferring: 11 seconds
2208 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2209 block, succ_bb))
2211 changed = true;
2212 goto maybe_dump_sets;
2215 /* If we have multiple successors, we take the intersection of all of
2216 them. Note that in the case of loop exit phi nodes, we may have
2217 phis to translate through. */
2218 else
2220 VEC(basic_block, heap) * worklist;
2221 size_t i;
2222 basic_block bprime, first = NULL;
2224 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2225 FOR_EACH_EDGE (e, ei, block->succs)
2227 if (!first
2228 && BB_VISITED (e->dest))
2229 first = e->dest;
2230 else if (BB_VISITED (e->dest))
2231 VEC_quick_push (basic_block, worklist, e->dest);
2234 /* With multiple successors, we must have already visited at least one. */
2235 if (!first)
2237 SET_BIT (changed_blocks, block->index);
2238 BB_VISITED (block) = 0;
2239 BB_DEFERRED (block) = 1;
2240 changed = true;
2241 VEC_free (basic_block, heap, worklist);
2242 goto maybe_dump_sets;
2245 if (!gimple_seq_empty_p (phi_nodes (first)))
2246 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2247 else
2248 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2250 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2252 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2254 bitmap_set_t tmp = bitmap_set_new ();
2255 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2256 bitmap_set_and (ANTIC_OUT, tmp);
2257 bitmap_set_free (tmp);
2259 else
2260 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2262 VEC_free (basic_block, heap, worklist);
2265 /* Prune expressions that are clobbered in block and thus become
2266 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2267 prune_clobbered_mems (ANTIC_OUT, block);
2269 /* Generate ANTIC_OUT - TMP_GEN. */
2270 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2272 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2273 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2274 TMP_GEN (block));
2276 /* Then union in the ANTIC_OUT - TMP_GEN values,
2277 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2278 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2279 bitmap_value_insert_into_set (ANTIC_IN (block),
2280 expression_for_id (bii));
2282 clean (ANTIC_IN (block), block);
2284 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2286 changed = true;
2287 SET_BIT (changed_blocks, block->index);
2288 FOR_EACH_EDGE (e, ei, block->preds)
2289 SET_BIT (changed_blocks, e->src->index);
2291 else
2292 RESET_BIT (changed_blocks, block->index);
2294 maybe_dump_sets:
2295 if (dump_file && (dump_flags & TDF_DETAILS))
2297 if (!BB_DEFERRED (block) || BB_VISITED (block))
2299 if (ANTIC_OUT)
2300 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2302 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2303 block->index);
2305 if (S)
2306 print_bitmap_set (dump_file, S, "S", block->index);
2308 else
2310 fprintf (dump_file,
2311 "Block %d was deferred for a future iteration.\n",
2312 block->index);
2315 if (old)
2316 bitmap_set_free (old);
2317 if (S)
2318 bitmap_set_free (S);
2319 if (ANTIC_OUT)
2320 bitmap_set_free (ANTIC_OUT);
2321 return changed;
2324 /* Compute PARTIAL_ANTIC for BLOCK.
2326 If succs(BLOCK) > 1 then
2327 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2328 in ANTIC_OUT for all succ(BLOCK)
2329 else if succs(BLOCK) == 1 then
2330 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2332 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2333 - ANTIC_IN[BLOCK])
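/* Worked example (hypothetical CFG): if BLOCK has successors S1 and
   S2 with ANTIC_IN[S1] = {a + b}, ANTIC_IN[S2] = {c + d} and empty
   PA_IN sets, then ANTIC_OUT[BLOCK] is empty but PA_OUT[BLOCK] =
   {a + b, c + d}; each expression is anticipated along only some of
   the paths from BLOCK.  */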
2336 static bool
2337 compute_partial_antic_aux (basic_block block,
2338 bool block_has_abnormal_pred_edge)
2340 bool changed = false;
2341 bitmap_set_t old_PA_IN;
2342 bitmap_set_t PA_OUT;
2343 edge e;
2344 edge_iterator ei;
2345 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2347 old_PA_IN = PA_OUT = NULL;
2349 /* If any edges from predecessors are abnormal, antic_in is empty,
2350 so do nothing. */
2351 if (block_has_abnormal_pred_edge)
2352 goto maybe_dump_sets;
2354 /* If there are too many partially anticipatable values in the
2355 block, phi_translate_set can take an exponential time: stop
2356 before the translation starts. */
2357 if (max_pa
2358 && single_succ_p (block)
2359 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2360 goto maybe_dump_sets;
2362 old_PA_IN = PA_IN (block);
2363 PA_OUT = bitmap_set_new ();
2365 /* If the block has no successors, ANTIC_OUT is empty. */
2366 if (EDGE_COUNT (block->succs) == 0)
2368 /* If we have one successor, we could have some phi nodes to
2369 translate through. Note that we can't phi translate across DFS
2370 back edges in partial antic, because it uses a union operation on
2371 the successors. For recurrences like IV's, we will end up
2372 generating a new value in the set on each go around (i + 3 (VH.1),
2373 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2374 else if (single_succ_p (block))
2376 basic_block succ = single_succ (block);
2377 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2378 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2380 /* If we have multiple successors, we take the union of all of
2381 them. */
2382 else
2384 VEC(basic_block, heap) * worklist;
2385 size_t i;
2386 basic_block bprime;
2388 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2389 FOR_EACH_EDGE (e, ei, block->succs)
2391 if (e->flags & EDGE_DFS_BACK)
2392 continue;
2393 VEC_quick_push (basic_block, worklist, e->dest);
2395 if (VEC_length (basic_block, worklist) > 0)
2397 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2399 unsigned int i;
2400 bitmap_iterator bi;
2402 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2403 bitmap_value_insert_into_set (PA_OUT,
2404 expression_for_id (i));
2405 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2407 bitmap_set_t pa_in = bitmap_set_new ();
2408 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2409 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2410 bitmap_value_insert_into_set (PA_OUT,
2411 expression_for_id (i));
2412 bitmap_set_free (pa_in);
2414 else
2415 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2416 bitmap_value_insert_into_set (PA_OUT,
2417 expression_for_id (i));
2420 VEC_free (basic_block, heap, worklist);
2423 /* Prune expressions that are clobbered in block and thus become
2424 invalid if translated from PA_OUT to PA_IN. */
2425 prune_clobbered_mems (PA_OUT, block);
2427 /* PA_IN starts with PA_OUT - TMP_GEN.
2428 Then we subtract things from ANTIC_IN. */
2429 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2431 /* For partial antic, we want to put back in the phi results, since
2432 we will properly avoid making them partially antic over backedges. */
2433 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2434 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2436 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2437 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2439 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2441 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2443 changed = true;
2444 SET_BIT (changed_blocks, block->index);
2445 FOR_EACH_EDGE (e, ei, block->preds)
2446 SET_BIT (changed_blocks, e->src->index);
2448 else
2449 RESET_BIT (changed_blocks, block->index);
2451 maybe_dump_sets:
2452 if (dump_file && (dump_flags & TDF_DETAILS))
2454 if (PA_OUT)
2455 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2457 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2459 if (old_PA_IN)
2460 bitmap_set_free (old_PA_IN);
2461 if (PA_OUT)
2462 bitmap_set_free (PA_OUT);
2463 return changed;
2466 /* Compute ANTIC and partial ANTIC sets. */
2468 static void
2469 compute_antic (void)
2471 bool changed = true;
2472 int num_iterations = 0;
2473 basic_block block;
2474 int i;
2476 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2477 We pre-build the map of blocks with incoming abnormal edges here. */
2478 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2479 sbitmap_zero (has_abnormal_preds);
2481 FOR_EACH_BB (block)
2483 edge_iterator ei;
2484 edge e;
2486 FOR_EACH_EDGE (e, ei, block->preds)
2488 e->flags &= ~EDGE_DFS_BACK;
2489 if (e->flags & EDGE_ABNORMAL)
2491 SET_BIT (has_abnormal_preds, block->index);
2492 break;
2496 BB_VISITED (block) = 0;
2497 BB_DEFERRED (block) = 0;
2499 /* While we are here, give empty ANTIC_IN sets to each block. */
2500 ANTIC_IN (block) = bitmap_set_new ();
2501 PA_IN (block) = bitmap_set_new ();
2504 /* At the exit block we anticipate nothing. */
2505 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2506 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2507 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2509 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2510 sbitmap_ones (changed_blocks);
2511 while (changed)
2513 if (dump_file && (dump_flags & TDF_DETAILS))
2514 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2515 /* ??? We need to clear our PHI translation cache here as the
2516 ANTIC sets shrink and we restrict valid translations to
2517 those having operands with leaders in ANTIC. Same below
2518 for PA ANTIC computation. */
2519 num_iterations++;
2520 changed = false;
2521 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2523 if (TEST_BIT (changed_blocks, postorder[i]))
2525 basic_block block = BASIC_BLOCK (postorder[i]);
2526 changed |= compute_antic_aux (block,
2527 TEST_BIT (has_abnormal_preds,
2528 block->index));
2531 /* Theoretically possible, but *highly* unlikely. */
2532 gcc_checking_assert (num_iterations < 500);
2535 statistics_histogram_event (cfun, "compute_antic iterations",
2536 num_iterations);
2538 if (do_partial_partial)
2540 sbitmap_ones (changed_blocks);
2541 mark_dfs_back_edges ();
2542 num_iterations = 0;
2543 changed = true;
2544 while (changed)
2546 if (dump_file && (dump_flags & TDF_DETAILS))
2547 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2548 num_iterations++;
2549 changed = false;
2550 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2552 if (TEST_BIT (changed_blocks, postorder[i]))
2554 basic_block block = BASIC_BLOCK (postorder[i]);
2555 changed
2556 |= compute_partial_antic_aux (block,
2557 TEST_BIT (has_abnormal_preds,
2558 block->index));
2561 /* Theoretically possible, but *highly* unlikely. */
2562 gcc_checking_assert (num_iterations < 500);
2564 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2565 num_iterations);
2567 sbitmap_free (has_abnormal_preds);
2568 sbitmap_free (changed_blocks);
2571 /* Return true if OP is a tree which we can perform PRE on.
2572 This may not match the operations we can value number, but in
2573 a perfect world would. */
2575 static bool
2576 can_PRE_operation (tree op)
2578 return UNARY_CLASS_P (op)
2579 || BINARY_CLASS_P (op)
2580 || COMPARISON_CLASS_P (op)
2581 || TREE_CODE (op) == MEM_REF
2582 || TREE_CODE (op) == COMPONENT_REF
2583 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2584 || TREE_CODE (op) == CALL_EXPR
2585 || TREE_CODE (op) == ARRAY_REF;
2589 /* Inserted expressions are placed onto this worklist, which is used
2590 for performing quick dead code elimination of insertions we made
2591 that didn't turn out to be necessary. */
2592 static bitmap inserted_exprs;
2594 /* The actual worker for create_component_ref_by_pieces. */
2596 static tree
2597 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2598 unsigned int *operand, gimple_seq *stmts,
2599 gimple domstmt)
2601 vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
2602 *operand);
2603 tree genop;
2604 ++*operand;
2605 switch (currop->opcode)
2607 case CALL_EXPR:
2609 tree folded, sc = NULL_TREE;
2610 unsigned int nargs = 0;
2611 tree fn, *args;
2612 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2613 fn = currop->op0;
2614 else
2616 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2617 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2618 if (!fn)
2619 return NULL_TREE;
2621 if (currop->op1)
2623 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2624 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2625 if (!sc)
2626 return NULL_TREE;
2628 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2629 ref->operands) - 1);
2630 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2632 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2633 operand, stmts,
2634 domstmt);
2635 if (!args[nargs])
2637 free (args);
2638 return NULL_TREE;
2640 nargs++;
2642 folded = build_call_array (currop->type,
2643 (TREE_CODE (fn) == FUNCTION_DECL
2644 ? build_fold_addr_expr (fn) : fn),
2645 nargs, args);
2646 free (args);
2647 if (sc)
2648 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2649 return folded;
2652 case MEM_REF:
2654 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2655 stmts, domstmt);
2656 tree offset = currop->op0;
2657 if (!baseop)
2658 return NULL_TREE;
2659 if (TREE_CODE (baseop) == ADDR_EXPR
2660 && handled_component_p (TREE_OPERAND (baseop, 0)))
2662 HOST_WIDE_INT off;
2663 tree base;
2664 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2665 &off);
2666 gcc_assert (base);
2667 offset = int_const_binop (PLUS_EXPR, offset,
2668 build_int_cst (TREE_TYPE (offset),
2669 off));
2670 baseop = build_fold_addr_expr (base);
2672 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2675 case TARGET_MEM_REF:
2677 pre_expr op0expr, op1expr;
2678 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2679 vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
2680 ++*operand);
2681 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2682 stmts, domstmt);
2683 if (!baseop)
2684 return NULL_TREE;
2685 if (currop->op0)
2687 op0expr = get_or_alloc_expr_for (currop->op0);
2688 genop0 = find_or_generate_expression (block, op0expr,
2689 stmts, domstmt);
2690 if (!genop0)
2691 return NULL_TREE;
2693 if (nextop->op0)
2695 op1expr = get_or_alloc_expr_for (nextop->op0);
2696 genop1 = find_or_generate_expression (block, op1expr,
2697 stmts, domstmt);
2698 if (!genop1)
2699 return NULL_TREE;
2701 return build5 (TARGET_MEM_REF, currop->type,
2702 baseop, currop->op2, genop0, currop->op1, genop1);
2705 case ADDR_EXPR:
2706 if (currop->op0)
2708 gcc_assert (is_gimple_min_invariant (currop->op0));
2709 return currop->op0;
2711 /* Fallthrough. */
2712 case REALPART_EXPR:
2713 case IMAGPART_EXPR:
2714 case VIEW_CONVERT_EXPR:
2716 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2717 operand,
2718 stmts, domstmt);
2719 if (!genop0)
2720 return NULL_TREE;
2722 return fold_build1 (currop->opcode, currop->type, genop0);
2725 case WITH_SIZE_EXPR:
2727 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2728 stmts, domstmt);
2729 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2730 tree genop1;
2732 if (!genop0)
2733 return NULL_TREE;
2735 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2736 if (!genop1)
2737 return NULL_TREE;
2739 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2742 case BIT_FIELD_REF:
2744 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2745 stmts, domstmt);
2746 tree op1 = currop->op0;
2747 tree op2 = currop->op1;
2749 if (!genop0)
2750 return NULL_TREE;
2752 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2755 /* For array ref vn_reference_op's, operand 1 of the array ref
2756 is op0 of the reference op and operand 3 of the array ref is
2757 op1. */
2758 case ARRAY_RANGE_REF:
2759 case ARRAY_REF:
2761 tree genop0;
2762 tree genop1 = currop->op0;
2763 pre_expr op1expr;
2764 tree genop2 = currop->op1;
2765 pre_expr op2expr;
2766 tree genop3 = currop->op2;
2767 pre_expr op3expr;
2768 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2769 stmts, domstmt);
2770 if (!genop0)
2771 return NULL_TREE;
2772 op1expr = get_or_alloc_expr_for (genop1);
2773 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2774 if (!genop1)
2775 return NULL_TREE;
2776 if (genop2)
2778 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2779 /* Drop zero minimum index if redundant. */
2780 if (integer_zerop (genop2)
2781 && (!domain_type
2782 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2783 genop2 = NULL_TREE;
2784 else
2786 op2expr = get_or_alloc_expr_for (genop2);
2787 genop2 = find_or_generate_expression (block, op2expr, stmts,
2788 domstmt);
2789 if (!genop2)
2790 return NULL_TREE;
2793 if (genop3)
2795 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2796 /* We can't always put a size in units of the element alignment
2797 here as the element alignment may not be visible. See
2798 PR43783. Simply drop the element size for constant
2799 sizes. */
2800 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2801 genop3 = NULL_TREE;
2802 else
2804 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2805 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2806 op3expr = get_or_alloc_expr_for (genop3);
2807 genop3 = find_or_generate_expression (block, op3expr, stmts,
2808 domstmt);
2809 if (!genop3)
2810 return NULL_TREE;
2813 return build4 (currop->opcode, currop->type, genop0, genop1,
2814 genop2, genop3);
2816 case COMPONENT_REF:
2818 tree op0;
2819 tree op1;
2820 tree genop2 = currop->op1;
2821 pre_expr op2expr;
2822 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2823 stmts, domstmt);
2824 if (!op0)
2825 return NULL_TREE;
2826 /* op1 should be a FIELD_DECL; these are represented by
2827 themselves. */
2828 op1 = currop->op0;
2829 if (genop2)
2831 op2expr = get_or_alloc_expr_for (genop2);
2832 genop2 = find_or_generate_expression (block, op2expr, stmts,
2833 domstmt);
2834 if (!genop2)
2835 return NULL_TREE;
2838 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2841 case SSA_NAME:
2843 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2844 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2845 return genop;
2847 case STRING_CST:
2848 case INTEGER_CST:
2849 case COMPLEX_CST:
2850 case VECTOR_CST:
2851 case REAL_CST:
2852 case CONSTRUCTOR:
2853 case VAR_DECL:
2854 case PARM_DECL:
2855 case CONST_DECL:
2856 case RESULT_DECL:
2857 case FUNCTION_DECL:
2858 return currop->op0;
2860 default:
2861 gcc_unreachable ();
2865 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2866 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2867 trying to rename aggregates into SSA form directly, which is a no-no.
2869 Thus, this routine doesn't create temporaries, it just builds a
2870 single access expression for the array, calling
2871 find_or_generate_expression to build the innermost pieces.
2873 This function is a subroutine of create_expression_by_pieces, and
2874 should not be called on its own unless you really know what you
2875 are doing. */
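/* For illustration (hypothetical reference): a value representing
   x.f[i_1] is rebuilt as a single access tree, roughly
     ARRAY_REF <COMPONENT_REF <x, f>, i_1>
   where only the scalar operand i_1 goes through
   find_or_generate_expression; no aggregate-typed temporary is ever
   created for the intermediate COMPONENT_REF.  */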
2877 static tree
2878 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2879 gimple_seq *stmts, gimple domstmt)
2881 unsigned int op = 0;
2882 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2885 /* Find a leader for an expression, or generate one using
2886 create_expression_by_pieces if it's ANTIC but
2887 complex.
2888 BLOCK is the basic_block we are looking for leaders in.
2889 EXPR is the expression to find a leader or generate for.
2890 STMTS is the statement list to put the inserted expressions on.
2891 Returns the SSA_NAME of the LHS of the generated expression or the
2892 leader.
2893 DOMSTMT if non-NULL is a statement that should be dominated by
2894 all uses in the generated expression. If DOMSTMT is non-NULL this
2895 routine can fail and return NULL_TREE. Otherwise it will assert
2896 on failure. */
2898 static tree
2899 find_or_generate_expression (basic_block block, pre_expr expr,
2900 gimple_seq *stmts, gimple domstmt)
2902 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2903 get_expr_value_id (expr), domstmt);
2904 tree genop = NULL;
2905 if (leader)
2907 if (leader->kind == NAME)
2908 genop = PRE_EXPR_NAME (leader);
2909 else if (leader->kind == CONSTANT)
2910 genop = PRE_EXPR_CONSTANT (leader);
2913 /* If it's still NULL, it must be a complex expression, so generate
2914 it recursively. Not so if inserting expressions for values generated
2915 by SCCVN. */
2916 if (genop == NULL
2917 && !domstmt)
2919 bitmap_set_t exprset;
2920 unsigned int lookfor = get_expr_value_id (expr);
2921 bool handled = false;
2922 bitmap_iterator bi;
2923 unsigned int i;
2925 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2926 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2928 pre_expr temp = expression_for_id (i);
2929 if (temp->kind != NAME)
2931 handled = true;
2932 genop = create_expression_by_pieces (block, temp, stmts,
2933 domstmt,
2934 get_expr_type (expr));
2935 break;
2938 if (!handled && domstmt)
2939 return NULL_TREE;
2941 gcc_assert (handled);
2943 return genop;
2946 #define NECESSARY GF_PLF_1
2948 /* Create an expression in pieces, so that we can handle very complex
2949 expressions that may be ANTIC but not necessarily GIMPLE.
2950 BLOCK is the basic block the expression will be inserted into,
2951 EXPR is the expression to insert (in value form)
2952 STMTS is a statement list to append the necessary insertions into.
2954 This function will die if we hit some value that shouldn't be
2955 ANTIC but is (i.e. there is no leader for it, or for its components).
2956 This function may also generate expressions that are themselves
2957 partially or fully redundant. Those that are will be either made
2958 fully redundant during the next iteration of insert (for partially
2959 redundant ones), or eliminated by eliminate (for fully redundant
2960 ones).
2962 If DOMSTMT is non-NULL then we make sure that all uses in the
2963 expressions dominate that statement. In this case the function
2964 can return NULL_TREE to signal failure. */
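/* For illustration (hypothetical value): generating (a + b) * c by
   pieces first emits pretmp_1 = a + b for the inner value, then
   pretmp_2 = pretmp_1 * c; each temporary is given a value number and
   entered into NEW_SETS and AVAIL_OUT so that later pieces (and later
   iterations of insert) can find it as a leader.  */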
2966 static tree
2967 create_expression_by_pieces (basic_block block, pre_expr expr,
2968 gimple_seq *stmts, gimple domstmt, tree type)
2970 tree name;
2971 tree folded;
2972 gimple_seq forced_stmts = NULL;
2973 unsigned int value_id;
2974 gimple_stmt_iterator gsi;
2975 tree exprtype = type ? type : get_expr_type (expr);
2976 pre_expr nameexpr;
2977 gimple newstmt;
2979 switch (expr->kind)
2981 /* We may hit the NAME/CONSTANT case if we have to convert types
2982 that value numbering saw through. */
2983 case NAME:
2984 folded = PRE_EXPR_NAME (expr);
2985 break;
2986 case CONSTANT:
2987 folded = PRE_EXPR_CONSTANT (expr);
2988 break;
2989 case REFERENCE:
2991 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2992 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
2994 break;
2995 case NARY:
2997 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2998 tree genop[4];
2999 unsigned i;
3000 for (i = 0; i < nary->length; ++i)
3002 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3003 genop[i] = find_or_generate_expression (block, op,
3004 stmts, domstmt);
3005 if (!genop[i])
3006 return NULL_TREE;
3007 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3008 may have conversions stripped. */
3009 if (nary->opcode == POINTER_PLUS_EXPR)
3011 if (i == 0)
3012 genop[i] = fold_convert (nary->type, genop[i]);
3013 else if (i == 1)
3014 genop[i] = convert_to_ptrofftype (genop[i]);
3016 else
3017 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3019 if (nary->opcode == CONSTRUCTOR)
3021 VEC(constructor_elt,gc) *elts = NULL;
3022 for (i = 0; i < nary->length; ++i)
3023 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3024 folded = build_constructor (nary->type, elts);
3026 else
3028 switch (nary->length)
3030 case 1:
3031 folded = fold_build1 (nary->opcode, nary->type,
3032 genop[0]);
3033 break;
3034 case 2:
3035 folded = fold_build2 (nary->opcode, nary->type,
3036 genop[0], genop[1]);
3037 break;
3038 case 3:
3039 folded = fold_build3 (nary->opcode, nary->type,
3040 genop[0], genop[1], genop[2]);
3041 break;
3042 default:
3043 gcc_unreachable ();
3047 break;
3048 default:
3049 return NULL_TREE;
3052 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3053 folded = fold_convert (exprtype, folded);
3055 /* Force the generated expression to be a sequence of GIMPLE
3056 statements.
3057 We have to call unshare_expr because force_gimple_operand may
3058 modify the tree we pass to it. */
3059 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3060 false, NULL);
3062 /* If we have any intermediate expressions, add them to the
3063 value sets and chain them into the instruction stream. */
3064 if (forced_stmts)
3066 gsi = gsi_start (forced_stmts);
3067 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3069 gimple stmt = gsi_stmt (gsi);
3070 tree forcedname = gimple_get_lhs (stmt);
3071 pre_expr nameexpr;
3073 if (TREE_CODE (forcedname) == SSA_NAME)
3075 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3076 VN_INFO_GET (forcedname)->valnum = forcedname;
3077 VN_INFO (forcedname)->value_id = get_next_value_id ();
3078 nameexpr = get_or_alloc_expr_for_name (forcedname);
3079 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3080 if (!in_fre)
3081 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3082 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3085 gimple_seq_add_seq (stmts, forced_stmts);
3088 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
3089 newstmt = gimple_build_assign (name, folded);
3090 gimple_set_plf (newstmt, NECESSARY, false);
3092 gimple_seq_add_stmt (stmts, newstmt);
3093 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3095 /* Fold the last statement. */
3096 gsi = gsi_last (*stmts);
3097 if (fold_stmt_inplace (&gsi))
3098 update_stmt (gsi_stmt (gsi));
3100 /* Add a value number to the temporary.
3101 The value may already exist in either NEW_SETS or AVAIL_OUT, because
3102 we are creating the expression by pieces, and this particular piece of
3103 the expression may already have been represented. There is no harm in replacing
3104 here. */
3105 VN_INFO_GET (name)->valnum = name;
3106 value_id = get_expr_value_id (expr);
3107 VN_INFO (name)->value_id = value_id;
3108 nameexpr = get_or_alloc_expr_for_name (name);
3109 add_to_value (value_id, nameexpr);
3110 if (NEW_SETS (block))
3111 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3112 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3114 pre_stats.insertions++;
3115 if (dump_file && (dump_flags & TDF_DETAILS))
3117 fprintf (dump_file, "Inserted ");
3118 print_gimple_stmt (dump_file, newstmt, 0, 0);
3119 fprintf (dump_file, " in predecessor %d\n", block->index);
3122 return name;
3126 /* Returns true if we want to inhibit the insertions of PHI nodes
3127 for the given EXPR for basic block BB (a member of a loop).
3128 We want to do this when we fear that the induction variable we
3129 create might inhibit vectorization. */
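/* For example (hypothetical loop): hoisting the load a[i_1] out of
     for (...) { ... = a[i_1]; i_2 = i_1 + 1; }
   would replace a simple induction-variable access with a PHI of
   previously loaded values, which can keep the vectorizer from
   recognizing the access pattern; in such cases we prefer to keep
   the redundancy.  */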
3131 static bool
3132 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3134 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3135 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3136 vn_reference_op_t op;
3137 unsigned i;
3139 /* If we aren't going to vectorize we don't inhibit anything. */
3140 if (!flag_tree_vectorize)
3141 return false;
3143 /* Otherwise we inhibit the insertion when the address of the
3144 memory reference is a simple induction variable. In other
3145 cases the vectorizer won't do anything anyway (either it's
3146 loop invariant or a complicated expression). */
3147 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3149 switch (op->opcode)
3151 case CALL_EXPR:
3152 /* Calls are not a problem. */
3153 return false;
3155 case ARRAY_REF:
3156 case ARRAY_RANGE_REF:
3157 if (TREE_CODE (op->op0) != SSA_NAME)
3158 break;
3159 /* Fallthru. */
3160 case SSA_NAME:
3162 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3163 affine_iv iv;
3164 /* Default defs are loop invariant. */
3165 if (!defbb)
3166 break;
3167 /* Defined outside this loop, also loop invariant. */
3168 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3169 break;
3170 /* If it's a simple induction variable inhibit insertion,
3171 the vectorizer might be interested in this one. */
3172 if (simple_iv (bb->loop_father, bb->loop_father,
3173 op->op0, &iv, true))
3174 return true;
3175 /* No simple IV, vectorizer can't do anything, hence no
3176 reason to inhibit the transformation for this operand. */
3177 break;
3179 default:
3180 break;
3183 return false;
3186 /* Insert the to-be-made-available values of expression EXPRNUM for each
3187 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3188 merge the result with a phi node, given the same value number as
3189 NODE. Return true if we have inserted new stuff. */
3191 static bool
3192 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3193 VEC(pre_expr, heap) *avail)
3195 pre_expr expr = expression_for_id (exprnum);
3196 pre_expr newphi;
3197 unsigned int val = get_expr_value_id (expr);
3198 edge pred;
3199 bool insertions = false;
3200 bool nophi = false;
3201 basic_block bprime;
3202 pre_expr eprime;
3203 edge_iterator ei;
3204 tree type = get_expr_type (expr);
3205 tree temp;
3206 gimple phi;
3208 /* Make sure we aren't creating an induction variable. */
3209 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3211 bool firstinsideloop = false;
3212 bool secondinsideloop = false;
3213 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3214 EDGE_PRED (block, 0)->src);
3215 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3216 EDGE_PRED (block, 1)->src);
3217 /* Induction variables only have one edge inside the loop. */
3218 if ((firstinsideloop ^ secondinsideloop)
3219 && (expr->kind != REFERENCE
3220 || inhibit_phi_insertion (block, expr)))
3222 if (dump_file && (dump_flags & TDF_DETAILS))
3223 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3224 nophi = true;
3228 /* Make the necessary insertions. */
3229 FOR_EACH_EDGE (pred, ei, block->preds)
3231 gimple_seq stmts = NULL;
3232 tree builtexpr;
3233 bprime = pred->src;
3234 eprime = VEC_index (pre_expr, avail, pred->dest_idx);
3236 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3238 builtexpr = create_expression_by_pieces (bprime,
3239 eprime,
3240 &stmts, NULL,
3241 type);
3242 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3243 gsi_insert_seq_on_edge (pred, stmts);
3244 VEC_replace (pre_expr, avail, pred->dest_idx,
3245 get_or_alloc_expr_for_name (builtexpr));
3246 insertions = true;
3248 else if (eprime->kind == CONSTANT)
3250 /* Constants may not have the right type; fold_convert
3251 should give us back a constant with the right type. */
3252 tree constant = PRE_EXPR_CONSTANT (eprime);
3253 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3255 tree builtexpr = fold_convert (type, constant);
3256 if (!is_gimple_min_invariant (builtexpr))
3258 tree forcedexpr = force_gimple_operand (builtexpr,
3259 &stmts, true,
3260 NULL);
3261 if (!is_gimple_min_invariant (forcedexpr))
3263 if (forcedexpr != builtexpr)
3265 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3266 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3268 if (stmts)
3270 gimple_stmt_iterator gsi;
3271 gsi = gsi_start (stmts);
3272 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3274 gimple stmt = gsi_stmt (gsi);
3275 tree lhs = gimple_get_lhs (stmt);
3276 if (TREE_CODE (lhs) == SSA_NAME)
3277 bitmap_set_bit (inserted_exprs,
3278 SSA_NAME_VERSION (lhs));
3279 gimple_set_plf (stmt, NECESSARY, false);
3281 gsi_insert_seq_on_edge (pred, stmts);
3283 VEC_replace (pre_expr, avail, pred->dest_idx,
3284 get_or_alloc_expr_for_name (forcedexpr));
3287 else
3288 VEC_replace (pre_expr, avail, pred->dest_idx,
3289 get_or_alloc_expr_for_constant (builtexpr));
3292 else if (eprime->kind == NAME)
3294 /* We may have to do a conversion because our value
3295 numbering can look through types in certain cases, but
3296 our IL requires all operands of a phi node have the same
3297 type. */
3298 tree name = PRE_EXPR_NAME (eprime);
3299 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3301 tree builtexpr;
3302 tree forcedexpr;
3303 builtexpr = fold_convert (type, name);
3304 forcedexpr = force_gimple_operand (builtexpr,
3305 &stmts, true,
3306 NULL);
3308 if (forcedexpr != name)
3310 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3311 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3314 if (stmts)
3316 gimple_stmt_iterator gsi;
3317 gsi = gsi_start (stmts);
3318 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3320 gimple stmt = gsi_stmt (gsi);
3321 tree lhs = gimple_get_lhs (stmt);
3322 if (TREE_CODE (lhs) == SSA_NAME)
3323 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3324 gimple_set_plf (stmt, NECESSARY, false);
3326 gsi_insert_seq_on_edge (pred, stmts);
3328 VEC_replace (pre_expr, avail, pred->dest_idx,
3329 get_or_alloc_expr_for_name (forcedexpr));
3333 /* If we didn't want a phi node, and we made insertions, we still have
3334 inserted new stuff, and thus return true. If we didn't want a phi node,
3335 and didn't make insertions, we haven't added anything new, so return
3336 false. */
3337 if (nophi && insertions)
3338 return true;
3339 else if (nophi && !insertions)
3340 return false;
3342 /* Now build a phi for the new variable. */
3343 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3344 phi = create_phi_node (temp, block);
3346 gimple_set_plf (phi, NECESSARY, false);
3347 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3348 VN_INFO (gimple_phi_result (phi))->value_id = val;
3349 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3350 FOR_EACH_EDGE (pred, ei, block->preds)
3352 pre_expr ae = VEC_index (pre_expr, avail, pred->dest_idx);
3353 gcc_assert (get_expr_type (ae) == type
3354 || useless_type_conversion_p (type, get_expr_type (ae)));
3355 if (ae->kind == CONSTANT)
3356 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3357 else
3358 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3361 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3362 add_to_value (val, newphi);
3364 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3365 this insertion, since we test for the existence of this value in PHI_GEN
3366 before proceeding with the partial redundancy checks in insert_aux.
3368 The value may exist in AVAIL_OUT, in particular, it could be represented
3369 by the expression we are trying to eliminate, in which case we want the
3370 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3371 inserted there.
3373 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3374 this block, because if it did, it would have existed in our dominator's
3375 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3378 bitmap_insert_into_set (PHI_GEN (block), newphi);
3379 bitmap_value_replace_in_set (AVAIL_OUT (block),
3380 newphi);
3381 bitmap_insert_into_set (NEW_SETS (block),
3382 newphi);
3384 if (dump_file && (dump_flags & TDF_DETAILS))
3386 fprintf (dump_file, "Created phi ");
3387 print_gimple_stmt (dump_file, phi, 0, 0);
3388 fprintf (dump_file, " in block %d\n", block->index);
3390 pre_stats.phis++;
3391 return true;
3396 /* Perform insertion of partially redundant values.
3397 For BLOCK, do the following:
3398 1. Propagate the NEW_SETS of the dominator into the current block.
3399 If the block has multiple predecessors,
3400 2a. Iterate over the ANTIC expressions for the block to see if
3401 any of them are partially redundant.
3402 2b. If so, insert them into the necessary predecessors to make
3403 the expression fully redundant.
3404 2c. Insert a new PHI merging the values of the predecessors.
3405 2d. Insert the new PHI, and the new expressions, into the
3406 NEW_SETS set.
3407 3. Recursively call ourselves on the dominator children of BLOCK.
3409 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3410 do_regular_insertion and do_partial_partial_insertion.
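/* Classic example (hypothetical source):
     if (c) x = a + b;
     y = a + b;
   Here a + b is partially redundant. Step 2b inserts a + b into the
   predecessor that lacked it, step 2c merges the values with
   t_3 = PHI <t_1, t_2>, and elimination can then rewrite y = a + b
   to use t_3.  */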
3414 static bool
3415 do_regular_insertion (basic_block block, basic_block dom)
3417 bool new_stuff = false;
3418 VEC (pre_expr, heap) *exprs;
3419 pre_expr expr;
3420 VEC (pre_expr, heap) *avail = NULL;
3421 int i;
3423 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3424 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3426 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3428 if (expr->kind != NAME)
3430 unsigned int val;
3431 bool by_some = false;
3432 bool cant_insert = false;
3433 bool all_same = true;
3434 pre_expr first_s = NULL;
3435 edge pred;
3436 basic_block bprime;
3437 pre_expr eprime = NULL;
3438 edge_iterator ei;
3439 pre_expr edoubleprime = NULL;
3440 bool do_insertion = false;
3442 val = get_expr_value_id (expr);
3443 if (bitmap_set_contains_value (PHI_GEN (block), val))
3444 continue;
3445 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3447 if (dump_file && (dump_flags & TDF_DETAILS))
3448 fprintf (dump_file, "Found fully redundant value\n");
3449 continue;
3452 FOR_EACH_EDGE (pred, ei, block->preds)
3454 unsigned int vprime;
3456 /* We should never run insertion for the exit block
3457 and so should never come across fake pred edges. */
3458 gcc_assert (!(pred->flags & EDGE_FAKE));
3459 bprime = pred->src;
3460 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3461 bprime, block);
3463 /* eprime will generally only be NULL if the
3464 value of the expression, translated
3465 through the PHI for this predecessor, is
3466 undefined. If that is the case, we can't
3467 make the expression fully redundant,
3468 because its value is undefined along a
3469 predecessor path. We can thus break out
3470 early because it doesn't matter what the
3471 rest of the results are. */
3472 if (eprime == NULL)
3474 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3475 cant_insert = true;
3476 break;
3479 eprime = fully_constant_expression (eprime);
3480 vprime = get_expr_value_id (eprime);
3481 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3482 vprime, NULL);
3483 if (edoubleprime == NULL)
3485 VEC_replace (pre_expr, avail, pred->dest_idx, eprime);
3486 all_same = false;
3488 else
3490 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3491 by_some = true;
3492 /* We want to perform insertions to remove a redundancy on
3493 a path in the CFG we want to optimize for speed. */
3494 if (optimize_edge_for_speed_p (pred))
3495 do_insertion = true;
3496 if (first_s == NULL)
3497 first_s = edoubleprime;
3498 else if (!pre_expr_d::equal (first_s, edoubleprime))
3499 all_same = false;
3502 /* If we can insert it, it's not the same value
3503 already existing along every predecessor, and
3504 it's defined by some predecessor, then it is
3505 partially redundant. */
3506 if (!cant_insert && !all_same && by_some)
3508 if (!do_insertion)
3510 if (dump_file && (dump_flags & TDF_DETAILS))
3512 fprintf (dump_file, "Skipping partial redundancy for "
3513 "expression ");
3514 print_pre_expr (dump_file, expr);
3515 fprintf (dump_file, " (%04d), no redundancy on to be "
3516 "optimized for speed edge\n", val);
3519 else if (dbg_cnt (treepre_insert))
3521 if (dump_file && (dump_flags & TDF_DETAILS))
3523 fprintf (dump_file, "Found partial redundancy for "
3524 "expression ");
3525 print_pre_expr (dump_file, expr);
3526 fprintf (dump_file, " (%04d)\n",
3527 get_expr_value_id (expr));
3529 if (insert_into_preds_of_block (block,
3530 get_expression_id (expr),
3531 avail))
3532 new_stuff = true;
3535 /* If all edges produce the same value and that value is
3536 an invariant, then the PHI has the same value on all
3537 edges. Note this. */
3538 else if (!cant_insert && all_same && eprime
3539 && (edoubleprime->kind == CONSTANT
3540 || edoubleprime->kind == NAME)
3541 && !value_id_constant_p (val))
3543 unsigned int j;
3544 bitmap_iterator bi;
3545 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3546 value_expressions, val);
3548 unsigned int new_val = get_expr_value_id (edoubleprime);
3549 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3551 pre_expr expr = expression_for_id (j);
3553 if (expr->kind == NAME)
3555 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3556 /* Just reset the value id and valnum so it is
3557 the same as the constant we have discovered. */
3558 if (edoubleprime->kind == CONSTANT)
3560 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3561 pre_stats.constified++;
3563 else
3564 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3565 info->value_id = new_val;
3572 VEC_free (pre_expr, heap, exprs);
3573 VEC_free (pre_expr, heap, avail);
3574 return new_stuff;
3578 /* Perform insertion for partially anticipatable expressions. There
3579 is only one case in which we will perform insertion for these: when
3580 the expression is partially anticipatable and fully available.
3581 In that case, we know that computing it earlier will enable us to
3582 remove the later computation. */
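/* For example (hypothetical source):
     if (c) x = a + b; else y = a + b;
     ...
     if (d) z = a + b;
   At the join block a + b is only partially anticipated (it is
   recomputed only when d holds) but fully available from both
   predecessors, so merging x and y with a PHI lets the later
   computation of z be removed.  */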
3585 static bool
3586 do_partial_partial_insertion (basic_block block, basic_block dom)
3588 bool new_stuff = false;
3589 VEC (pre_expr, heap) *exprs;
3590 pre_expr expr;
3591 VEC (pre_expr, heap) *avail = NULL;
3592 int i;
3594 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3595 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3597 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3599 if (expr->kind != NAME)
3601 unsigned int val;
3602 bool by_all = true;
3603 bool cant_insert = false;
3604 edge pred;
3605 basic_block bprime;
3606 pre_expr eprime = NULL;
3607 edge_iterator ei;
3609 val = get_expr_value_id (expr);
3610 if (bitmap_set_contains_value (PHI_GEN (block), val))
3611 continue;
3612 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3613 continue;
3615 FOR_EACH_EDGE (pred, ei, block->preds)
3617 unsigned int vprime;
3618 pre_expr edoubleprime;
3620 /* We should never run insertion for the exit block
3621 and so should never come across fake pred edges. */
3622 gcc_assert (!(pred->flags & EDGE_FAKE));
3623 bprime = pred->src;
3624 eprime = phi_translate (expr, ANTIC_IN (block),
3625 PA_IN (block),
3626 bprime, block);
3628 /* eprime will generally only be NULL if the
3629 value of the expression, translated
3630 through the PHI for this predecessor, is
3631 undefined. If that is the case, we can't
3632 make the expression fully redundant,
3633 because its value is undefined along a
3634 predecessor path. We can thus break out
3635 early because it doesn't matter what the
3636 rest of the results are. */
3637 if (eprime == NULL)
3639 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3640 cant_insert = true;
3641 break;
3644 eprime = fully_constant_expression (eprime);
3645 vprime = get_expr_value_id (eprime);
3646 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3647 vprime, NULL);
3648 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3649 if (edoubleprime == NULL)
3651 by_all = false;
3652 break;
3656 /* If we can insert it and the value is available
3657 along every predecessor, the expression is fully
3658 available there, so inserting here lets the later
3659 computation be removed. */
3660 if (!cant_insert && by_all)
3662 edge succ;
3663 bool do_insertion = false;
3665 /* Insert only if we can remove a later expression on a path
3666 that we want to optimize for speed.
3667 The phi node that we will be inserting in BLOCK is not free,
3668 and inserting it for the sake of a !optimize_for_speed successor
3669 may cause regressions on the speed path. */
3670 FOR_EACH_EDGE (succ, ei, block->succs)
3672 if (bitmap_set_contains_value (PA_IN (succ->dest), val))
3674 if (optimize_edge_for_speed_p (succ))
3675 do_insertion = true;
3679 if (!do_insertion)
3681 if (dump_file && (dump_flags & TDF_DETAILS))
3683 fprintf (dump_file, "Skipping partial partial redundancy "
3684 "for expression ");
3685 print_pre_expr (dump_file, expr);
3686 fprintf (dump_file, " (%04d), not partially anticipated "
3687 "on any to be optimized for speed edges\n", val);
3690 else if (dbg_cnt (treepre_insert))
3692 pre_stats.pa_insert++;
3693 if (dump_file && (dump_flags & TDF_DETAILS))
3695 fprintf (dump_file, "Found partial partial redundancy "
3696 "for expression ");
3697 print_pre_expr (dump_file, expr);
3698 fprintf (dump_file, " (%04d)\n",
3699 get_expr_value_id (expr));
3701 if (insert_into_preds_of_block (block,
3702 get_expression_id (expr),
3703 avail))
3704 new_stuff = true;
3710 VEC_free (pre_expr, heap, exprs);
3711 VEC_free (pre_expr, heap, avail);
3712 return new_stuff;
3715 static bool
3716 insert_aux (basic_block block)
3718 basic_block son;
3719 bool new_stuff = false;
3721 if (block)
3723 basic_block dom;
3724 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3725 if (dom)
3727 unsigned i;
3728 bitmap_iterator bi;
3729 bitmap_set_t newset = NEW_SETS (dom);
3730 if (newset)
3732 /* Note that we need to value_replace both NEW_SETS and
3733 AVAIL_OUT. In both sets, the value may already be
3734 represented by some non-simple expression that we want
3735 to replace with this one. */
3736 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3738 pre_expr expr = expression_for_id (i);
3739 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3740 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3743 if (!single_pred_p (block))
3745 new_stuff |= do_regular_insertion (block, dom);
3746 if (do_partial_partial)
3747 new_stuff |= do_partial_partial_insertion (block, dom);
3751 for (son = first_dom_son (CDI_DOMINATORS, block);
3752 son;
3753 son = next_dom_son (CDI_DOMINATORS, son))
3755 new_stuff |= insert_aux (son);
3758 return new_stuff;
3761 /* Perform insertion of partially redundant values. */
3763 static void
3764 insert (void)
3766 bool new_stuff = true;
3767 basic_block bb;
3768 int num_iterations = 0;
3770 FOR_ALL_BB (bb)
3771 NEW_SETS (bb) = bitmap_set_new ();
3773 while (new_stuff)
3775 num_iterations++;
3776 if (dump_file && (dump_flags & TDF_DETAILS))
3777 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3778 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3780 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3784 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */
3786 static void
3787 add_to_exp_gen (basic_block block, tree op)
3789 if (!in_fre)
3791 pre_expr result;
3792 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3793 return;
3794 result = get_or_alloc_expr_for_name (op);
3795 bitmap_value_insert_into_set (EXP_GEN (block), result);
3799 /* Create value ids for PHI in BLOCK. */
3801 static void
3802 make_values_for_phi (gimple phi, basic_block block)
3804 tree result = gimple_phi_result (phi);
3806 /* We have no need for virtual phis, as they don't represent
3807 actual computations. */
3808 if (!virtual_operand_p (result))
3810 pre_expr e = get_or_alloc_expr_for_name (result);
3811 add_to_value (get_expr_value_id (e), e);
3812 bitmap_insert_into_set (PHI_GEN (block), e);
3813 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3814 if (!in_fre)
3816 unsigned i;
3817 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3819 tree arg = gimple_phi_arg_def (phi, i);
3820 if (TREE_CODE (arg) == SSA_NAME)
3822 e = get_or_alloc_expr_for_name (arg);
3823 add_to_value (get_expr_value_id (e), e);
3830 /* Compute the AVAIL set for all basic blocks.
3832 This function performs value numbering of the statements in each basic
3833 block. The AVAIL sets are built from information we glean while doing
3834 this value numbering, since the AVAIL sets contain only one entry per
3835 value.
3837 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3838 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
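/* For illustration (hypothetical blocks): if B1 immediately dominates
   B2, the walk below copies AVAIL_OUT[B1] into AVAIL_OUT[B2] and then
   adds whatever B2 itself defines (TMP_GEN) or merges via PHIs
   (PHI_GEN), so a single top-down walk over the dominator tree
   computes every AVAIL_OUT set.  */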
3840 static void
3841 compute_avail (void)
3844 basic_block block, son;
3845 basic_block *worklist;
3846 size_t sp = 0;
3847 unsigned i;
3849 /* We pretend that default definitions are defined in the entry block.
3850 This includes function arguments and the static chain decl. */
3851 for (i = 1; i < num_ssa_names; ++i)
3853 tree name = ssa_name (i);
3854 pre_expr e;
3855 if (!name
3856 || !SSA_NAME_IS_DEFAULT_DEF (name)
3857 || has_zero_uses (name)
3858 || virtual_operand_p (name))
3859 continue;
3861 e = get_or_alloc_expr_for_name (name);
3862 add_to_value (get_expr_value_id (e), e);
3863 if (!in_fre)
3864 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3865 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3868 /* Allocate the worklist. */
3869 worklist = XNEWVEC (basic_block, n_basic_blocks);
3871 /* Seed the algorithm by putting the dominator children of the entry
3872 block on the worklist. */
3873 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3874 son;
3875 son = next_dom_son (CDI_DOMINATORS, son))
3876 worklist[sp++] = son;
3878 /* Loop until the worklist is empty. */
3879 while (sp)
3881 gimple_stmt_iterator gsi;
3882 gimple stmt;
3883 basic_block dom;
3884 unsigned int stmt_uid = 1;
3886 /* Pick a block from the worklist. */
3887 block = worklist[--sp];
3889 /* Initially, the set of available values in BLOCK is that of
3890 its immediate dominator. */
3891 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3892 if (dom)
3893 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3895 /* Generate values for PHI nodes. */
3896 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3897 make_values_for_phi (gsi_stmt (gsi), block);
3899 BB_MAY_NOTRETURN (block) = 0;
3901 /* Now compute value numbers and populate value sets with all
3902 the expressions computed in BLOCK. */
3903 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3905 ssa_op_iter iter;
3906 tree op;
3908 stmt = gsi_stmt (gsi);
3909 gimple_set_uid (stmt, stmt_uid++);
3911 /* Cache whether the basic-block has any non-visible side-effect
3912 or control flow.
3913 If this isn't a call or it is the last stmt in the
3914 basic-block then the CFG represents things correctly. */
3915 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3917 /* Non-looping const functions always return normally.
3918 Otherwise the call might not return, or might have side-effects
3919 that forbid hoisting possibly trapping expressions
3920 before it. */
3921 int flags = gimple_call_flags (stmt);
3922 if (!(flags & ECF_CONST)
3923 || (flags & ECF_LOOPING_CONST_OR_PURE))
3924 BB_MAY_NOTRETURN (block) = 1;
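/* Illustrative example of why this matters (hypothetical GIMPLE):

     foo ();            <- not const, may loop forever or call exit ()
     t_1 = a_2 / b_3;   <- may trap when b_3 == 0

   Making the division anticipatable before the call could hoist a
   trap above a point the program might never pass; the
   BB_MAY_NOTRETURN flag set here is checked below before trapping
   NARYs go into EXP_GEN.  */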
3927 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3929 pre_expr e = get_or_alloc_expr_for_name (op);
3931 add_to_value (get_expr_value_id (e), e);
3932 if (!in_fre)
3933 bitmap_insert_into_set (TMP_GEN (block), e);
3934 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3937 if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
3938 continue;
3940 switch (gimple_code (stmt))
3942 case GIMPLE_RETURN:
3943 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3944 add_to_exp_gen (block, op);
3945 continue;
3947 case GIMPLE_CALL:
3949 vn_reference_t ref;
3950 unsigned int i;
3951 vn_reference_op_t vro;
3952 pre_expr result = NULL;
3953 VEC(vn_reference_op_s, heap) *ops = NULL;
3955 /* We can value number only calls to real functions. */
3956 if (gimple_call_internal_p (stmt))
3957 continue;
3959 copy_reference_ops_from_call (stmt, &ops);
3960 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3961 gimple_expr_type (stmt),
3962 ops, &ref, VN_NOWALK);
3963 VEC_free (vn_reference_op_s, heap, ops);
3964 if (!ref)
3965 continue;
3967 for (i = 0; VEC_iterate (vn_reference_op_s,
3968 ref->operands, i,
3969 vro); i++)
3971 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3972 add_to_exp_gen (block, vro->op0);
3973 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3974 add_to_exp_gen (block, vro->op1);
3975 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3976 add_to_exp_gen (block, vro->op2);
3979 /* If the value of the call is not invalidated in
3980 this block until it is computed, add the expression
3981 to EXP_GEN. */
3982 if (!gimple_vuse (stmt)
3983 || gimple_code
3984 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3985 || gimple_bb (SSA_NAME_DEF_STMT
3986 (gimple_vuse (stmt))) != block)
3988 result = (pre_expr) pool_alloc (pre_expr_pool);
3989 result->kind = REFERENCE;
3990 result->id = 0;
3991 PRE_EXPR_REFERENCE (result) = ref;
3993 get_or_alloc_expression_id (result);
3994 add_to_value (get_expr_value_id (result), result);
3995 if (!in_fre)
3996 bitmap_value_insert_into_set (EXP_GEN (block), result);
3998 continue;
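/* Sketch of the EXP_GEN case above, with hypothetical names: for a
   pure call such as

     # VUSE <.MEM_3>
     t_1 = strlen (s_2);

   where .MEM_3 is defined by a PHI (i.e. it is BLOCK's incoming
   memory state), no store in BLOCK precedes the call, so its value
   would also be available at the start of BLOCK and the REFERENCE is
   anticipatable there.  */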
4001 case GIMPLE_ASSIGN:
4003 pre_expr result = NULL;
4004 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4006 case tcc_unary:
4007 case tcc_binary:
4008 case tcc_comparison:
4010 vn_nary_op_t nary;
4011 unsigned int i;
4013 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4014 gimple_assign_rhs_code (stmt),
4015 gimple_expr_type (stmt),
4016 gimple_assign_rhs1_ptr (stmt),
4017 &nary);
4019 if (!nary)
4020 continue;
4022 for (i = 0; i < nary->length; i++)
4023 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4024 add_to_exp_gen (block, nary->op[i]);
4026 /* If the NARY traps and there was a preceding
4027 point in the block that might not return, avoid
4028 adding the NARY to EXP_GEN. */
4029 if (BB_MAY_NOTRETURN (block)
4030 && vn_nary_may_trap (nary))
4031 continue;
4033 result = (pre_expr) pool_alloc (pre_expr_pool);
4034 result->kind = NARY;
4035 result->id = 0;
4036 PRE_EXPR_NARY (result) = nary;
4037 break;
4040 case tcc_declaration:
4041 case tcc_reference:
4043 vn_reference_t ref;
4044 unsigned int i;
4045 vn_reference_op_t vro;
4047 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4048 gimple_vuse (stmt),
4049 VN_WALK, &ref);
4050 if (!ref)
4051 continue;
4053 for (i = 0; VEC_iterate (vn_reference_op_s,
4054 ref->operands, i,
4055 vro); i++)
4057 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4058 add_to_exp_gen (block, vro->op0);
4059 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4060 add_to_exp_gen (block, vro->op1);
4061 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4062 add_to_exp_gen (block, vro->op2);
4065 /* If the value of the reference is not invalidated in
4066 this block until it is computed, add the expression
4067 to EXP_GEN. */
4068 if (gimple_vuse (stmt))
4070 gimple def_stmt;
4071 bool ok = true;
4072 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
4073 while (!gimple_nop_p (def_stmt)
4074 && gimple_code (def_stmt) != GIMPLE_PHI
4075 && gimple_bb (def_stmt) == block)
4077 if (stmt_may_clobber_ref_p
4078 (def_stmt, gimple_assign_rhs1 (stmt)))
4080 ok = false;
4081 break;
4083 def_stmt
4084 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
4086 if (!ok)
4087 continue;
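/* Illustration of the walk above (hypothetical names): in

     *p_1 = x_2;   <- defines .MEM_5, may clobber *q_3 if p_1, q_3 alias
     t_4 = *q_3;   <- VUSE <.MEM_5>

   the walk reaches the store, stmt_may_clobber_ref_p conservatively
   returns true, and the load is not added to EXP_GEN because its
   value at the start of BLOCK may differ.  */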
4090 result = (pre_expr) pool_alloc (pre_expr_pool);
4091 result->kind = REFERENCE;
4092 result->id = 0;
4093 PRE_EXPR_REFERENCE (result) = ref;
4094 break;
4097 default:
4098 /* For any other statement that we don't
4099 recognize, simply add all referenced
4100 SSA_NAMEs to EXP_GEN. */
4101 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4102 add_to_exp_gen (block, op);
4103 continue;
4106 get_or_alloc_expression_id (result);
4107 add_to_value (get_expr_value_id (result), result);
4108 if (!in_fre)
4109 bitmap_value_insert_into_set (EXP_GEN (block), result);
4111 continue;
4113 default:
4114 break;
4118 /* Put the dominator children of BLOCK on the worklist of blocks
4119 to compute available sets for. */
4120 for (son = first_dom_son (CDI_DOMINATORS, block);
4121 son;
4122 son = next_dom_son (CDI_DOMINATORS, son))
4123 worklist[sp++] = son;
4126 free (worklist);
4129 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4130 than the available expressions for it. The insertion point is
4131 right before the first use in STMT. Returns the SSA_NAME that should
4132 be used for replacement. */
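/* A sketch with hypothetical names: if SCCVN valued x_3 as the
   simpler (short) y_2 but no statement computing (short) y_2 exists,
   this builds one, say

     pretmp_4 = (short) y_2;

   inserts it right before STMT, and returns pretmp_4 so elimination
   can use it as the replacement.  */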
4134 static tree
4135 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4137 basic_block bb = gimple_bb (stmt);
4138 gimple_stmt_iterator gsi;
4139 gimple_seq stmts = NULL;
4140 tree expr;
4141 pre_expr e;
4143 /* First create a value expression from the expression we want
4144 to insert and associate it with the value handle for SSA_VN. */
4145 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4146 if (e == NULL)
4147 return NULL_TREE;
4149 /* Then use create_expression_by_pieces to generate a valid
4150 expression to insert at this point of the IL stream. */
4151 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4152 if (expr == NULL_TREE)
4153 return NULL_TREE;
4154 gsi = gsi_for_stmt (stmt);
4155 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4157 return expr;
4160 /* Eliminate fully redundant computations. */
4162 static unsigned int
4163 eliminate (void)
4165 VEC (gimple, heap) *to_remove = NULL;
4166 VEC (gimple, heap) *to_update = NULL;
4167 basic_block b;
4168 unsigned int todo = 0;
4169 gimple_stmt_iterator gsi;
4170 gimple stmt;
4171 unsigned i;
4173 FOR_EACH_BB (b)
4175 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4177 tree lhs = NULL_TREE;
4178 tree rhs = NULL_TREE;
4180 stmt = gsi_stmt (gsi);
4182 if (gimple_has_lhs (stmt))
4183 lhs = gimple_get_lhs (stmt);
4185 if (gimple_assign_single_p (stmt))
4186 rhs = gimple_assign_rhs1 (stmt);
4188 /* Lookup the RHS of the expression, see if we have an
4189 available computation for it. If so, replace the RHS with
4190 the available computation.
4192 See PR43491.
4193 We don't replace a global register variable when it is the RHS of
4194 a single assignment. We do replace local register variables since
4195 GCC does not guarantee that a local variable will be allocated in a register. */
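/* Hypothetical example of the exception (in the spirit of PR43491):

     register int r1 asm ("r1");  <- global hard register variable
     x_2 = r1;

   The assignment is left alone: r1's contents can change at any
   point, so exposing the hard register as the value of x_2 to later
   replacements would be wrong.  */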
4196 if (gimple_has_lhs (stmt)
4197 && TREE_CODE (lhs) == SSA_NAME
4198 && !gimple_assign_ssa_name_copy_p (stmt)
4199 && (!gimple_assign_single_p (stmt)
4200 || (!is_gimple_min_invariant (rhs)
4201 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4202 || !is_global_var (rhs)
4203 || !DECL_HARD_REGISTER (rhs))))
4204 && !gimple_has_volatile_ops (stmt)
4205 && !has_zero_uses (lhs))
4207 tree sprime = NULL;
4208 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4209 pre_expr sprimeexpr;
4210 gimple orig_stmt = stmt;
4212 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4213 get_expr_value_id (lhsexpr),
4214 NULL);
4216 if (sprimeexpr)
4218 if (sprimeexpr->kind == CONSTANT)
4219 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4220 else if (sprimeexpr->kind == NAME)
4221 sprime = PRE_EXPR_NAME (sprimeexpr);
4222 else
4223 gcc_unreachable ();
4226 /* If there is no existing leader but SCCVN knows this
4227 value is constant, use that constant. */
4228 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4230 sprime = VN_INFO (lhs)->valnum;
4231 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4232 TREE_TYPE (sprime)))
4233 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4235 if (dump_file && (dump_flags & TDF_DETAILS))
4237 fprintf (dump_file, "Replaced ");
4238 print_gimple_expr (dump_file, stmt, 0, 0);
4239 fprintf (dump_file, " with ");
4240 print_generic_expr (dump_file, sprime, 0);
4241 fprintf (dump_file, " in ");
4242 print_gimple_stmt (dump_file, stmt, 0, 0);
4244 pre_stats.eliminations++;
4245 propagate_tree_value_into_stmt (&gsi, sprime);
4246 stmt = gsi_stmt (gsi);
4247 update_stmt (stmt);
4249 /* If we removed EH side-effects from the statement, clean
4250 its EH information. */
4251 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4253 bitmap_set_bit (need_eh_cleanup,
4254 gimple_bb (stmt)->index);
4255 if (dump_file && (dump_flags & TDF_DETAILS))
4256 fprintf (dump_file, " Removed EH side-effects.\n");
4258 continue;
4261 /* If there is no existing usable leader but SCCVN thinks
4262 it has an expression it wants to use as replacement,
4263 insert that. */
4264 if (!sprime || sprime == lhs)
4266 tree val = VN_INFO (lhs)->valnum;
4267 if (val != VN_TOP
4268 && TREE_CODE (val) == SSA_NAME
4269 && VN_INFO (val)->needs_insertion
4270 && can_PRE_operation (vn_get_expr_for (val)))
4271 sprime = do_SCCVN_insertion (stmt, val);
4273 if (sprime
4274 && sprime != lhs
4275 && (rhs == NULL_TREE
4276 || TREE_CODE (rhs) != SSA_NAME
4277 || may_propagate_copy (rhs, sprime)))
4279 bool can_make_abnormal_goto
4280 = is_gimple_call (stmt)
4281 && stmt_can_make_abnormal_goto (stmt);
4283 gcc_assert (sprime != rhs);
4285 if (dump_file && (dump_flags & TDF_DETAILS))
4287 fprintf (dump_file, "Replaced ");
4288 print_gimple_expr (dump_file, stmt, 0, 0);
4289 fprintf (dump_file, " with ");
4290 print_generic_expr (dump_file, sprime, 0);
4291 fprintf (dump_file, " in ");
4292 print_gimple_stmt (dump_file, stmt, 0, 0);
4295 if (TREE_CODE (sprime) == SSA_NAME)
4296 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4297 NECESSARY, true);
4298 /* We need to make sure the new and old types actually match,
4299 which may require adding a simple cast, which fold_convert
4300 will do for us. */
4301 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4302 && !useless_type_conversion_p (gimple_expr_type (stmt),
4303 TREE_TYPE (sprime)))
4304 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4306 pre_stats.eliminations++;
4307 propagate_tree_value_into_stmt (&gsi, sprime);
4308 stmt = gsi_stmt (gsi);
4309 update_stmt (stmt);
4311 /* If we removed EH side-effects from the statement, clean
4312 its EH information. */
4313 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4315 bitmap_set_bit (need_eh_cleanup,
4316 gimple_bb (stmt)->index);
4317 if (dump_file && (dump_flags & TDF_DETAILS))
4318 fprintf (dump_file, " Removed EH side-effects.\n");
4321 /* Likewise for AB side-effects. */
4322 if (can_make_abnormal_goto
4323 && !stmt_can_make_abnormal_goto (stmt))
4325 bitmap_set_bit (need_ab_cleanup,
4326 gimple_bb (stmt)->index);
4327 if (dump_file && (dump_flags & TDF_DETAILS))
4328 fprintf (dump_file, " Removed AB side-effects.\n");
4332 /* If the statement is a scalar store, see if the expression
4333 has the same value number as its rhs. If so, the store is
4334 dead. */
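/* Illustrative redundant store (hypothetical names):

     x_1 = *p_2;
     ...            <- no intervening clobber of *p_2
     *p_2 = x_1;    <- stores the value *p_2 already holds: dead

   The lookup below finds that the value stored equals the value the
   location currently holds, so the store can be queued for removal.  */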
4335 else if (gimple_assign_single_p (stmt)
4336 && !gimple_has_volatile_ops (stmt)
4337 && !is_gimple_reg (gimple_assign_lhs (stmt))
4338 && (TREE_CODE (rhs) == SSA_NAME
4339 || is_gimple_min_invariant (rhs)))
4341 tree val;
4342 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4343 gimple_vuse (stmt), VN_WALK, NULL);
4344 if (TREE_CODE (rhs) == SSA_NAME)
4345 rhs = VN_INFO (rhs)->valnum;
4346 if (val
4347 && operand_equal_p (val, rhs, 0))
4349 if (dump_file && (dump_flags & TDF_DETAILS))
4351 fprintf (dump_file, "Deleted redundant store ");
4352 print_gimple_stmt (dump_file, stmt, 0, 0);
4355 /* Queue stmt for removal. */
4356 VEC_safe_push (gimple, heap, to_remove, stmt);
4359 /* Visit COND_EXPRs and fold the comparison with the
4360 available value-numbers. */
4361 else if (gimple_code (stmt) == GIMPLE_COND)
4363 tree op0 = gimple_cond_lhs (stmt);
4364 tree op1 = gimple_cond_rhs (stmt);
4365 tree result;
4367 if (TREE_CODE (op0) == SSA_NAME)
4368 op0 = VN_INFO (op0)->valnum;
4369 if (TREE_CODE (op1) == SSA_NAME)
4370 op1 = VN_INFO (op1)->valnum;
4371 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4372 op0, op1);
4373 if (result && TREE_CODE (result) == INTEGER_CST)
4375 if (integer_zerop (result))
4376 gimple_cond_make_false (stmt);
4377 else
4378 gimple_cond_make_true (stmt);
4379 update_stmt (stmt);
4380 todo |= TODO_cleanup_cfg;
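/* Sketch (hypothetical names): if VN recorded the values of a_1 and
   b_2 as the constants 5 and 7, then

     if (a_1 < b_2)

   folds via fold_binary to boolean true, gimple_cond_make_true turns
   the branch into an always-taken one, and the dead arm is removed by
   the requested cfg cleanup.  */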
4383 /* Visit indirect calls and turn them into direct calls if
4384 possible. */
4385 if (is_gimple_call (stmt))
4387 tree orig_fn = gimple_call_fn (stmt);
4388 tree fn;
4389 if (!orig_fn)
4390 continue;
4391 if (TREE_CODE (orig_fn) == SSA_NAME)
4392 fn = VN_INFO (orig_fn)->valnum;
4393 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4394 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4395 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4396 else
4397 continue;
4398 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4399 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4400 TREE_TYPE (fn)))
4402 bool can_make_abnormal_goto
4403 = stmt_can_make_abnormal_goto (stmt);
4404 bool was_noreturn = gimple_call_noreturn_p (stmt);
4406 if (dump_file && (dump_flags & TDF_DETAILS))
4408 fprintf (dump_file, "Replacing call target with ");
4409 print_generic_expr (dump_file, fn, 0);
4410 fprintf (dump_file, " in ");
4411 print_gimple_stmt (dump_file, stmt, 0, 0);
4414 gimple_call_set_fn (stmt, fn);
4415 VEC_safe_push (gimple, heap, to_update, stmt);
4417 /* When changing a call into a noreturn call, cfg cleanup
4418 is needed to fix up the noreturn call. */
4419 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4420 todo |= TODO_cleanup_cfg;
4422 /* If we removed EH side-effects from the statement, clean
4423 its EH information. */
4424 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4426 bitmap_set_bit (need_eh_cleanup,
4427 gimple_bb (stmt)->index);
4428 if (dump_file && (dump_flags & TDF_DETAILS))
4429 fprintf (dump_file, " Removed EH side-effects.\n");
4432 /* Likewise for AB side-effects. */
4433 if (can_make_abnormal_goto
4434 && !stmt_can_make_abnormal_goto (stmt))
4436 bitmap_set_bit (need_ab_cleanup,
4437 gimple_bb (stmt)->index);
4438 if (dump_file && (dump_flags & TDF_DETAILS))
4439 fprintf (dump_file, " Removed AB side-effects.\n");
4442 /* Changing an indirect call to a direct call may
4443 have exposed different semantics. This may
4444 require an SSA update. */
4445 todo |= TODO_update_ssa_only_virtuals;
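/* A sketch of such a devirtualization (hypothetical names): given

     fn_1 = &foo;
     fn_1 (x_2);      <- indirect call

   fn_1's value number is &foo, gimple_call_addr_fndecl recovers foo,
   and the call is rewritten to the direct call foo (x_2).  */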
4450 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4452 gimple stmt, phi = gsi_stmt (gsi);
4453 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4454 pre_expr sprimeexpr, resexpr;
4455 gimple_stmt_iterator gsi2;
4457 /* We want to perform redundant PHI elimination. Do so by
4458 replacing the PHI with a single copy if possible.
4459 Do not touch inserted, single-argument or virtual PHIs. */
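/* Illustrative redundant PHI (hypothetical names):

     x_3 = PHI <y_1 (2), y_1 (3)>

   Both arguments carry y_1's value, so x_3's value number is that of
   y_1; the leader lookup below finds y_1 in AVAIL_OUT and the PHI is
   replaced by the copy x_3 = y_1, itself queued for removal.  */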
4460 if (gimple_phi_num_args (phi) == 1
4461 || virtual_operand_p (res))
4463 gsi_next (&gsi);
4464 continue;
4467 resexpr = get_or_alloc_expr_for_name (res);
4468 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4469 get_expr_value_id (resexpr), NULL);
4470 if (sprimeexpr)
4472 if (sprimeexpr->kind == CONSTANT)
4473 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4474 else if (sprimeexpr->kind == NAME)
4475 sprime = PRE_EXPR_NAME (sprimeexpr);
4476 else
4477 gcc_unreachable ();
4479 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4481 sprime = VN_INFO (res)->valnum;
4482 if (!useless_type_conversion_p (TREE_TYPE (res),
4483 TREE_TYPE (sprime)))
4484 sprime = fold_convert (TREE_TYPE (res), sprime);
4486 if (!sprime
4487 || sprime == res)
4489 gsi_next (&gsi);
4490 continue;
4493 if (dump_file && (dump_flags & TDF_DETAILS))
4495 fprintf (dump_file, "Replaced redundant PHI node defining ");
4496 print_generic_expr (dump_file, res, 0);
4497 fprintf (dump_file, " with ");
4498 print_generic_expr (dump_file, sprime, 0);
4499 fprintf (dump_file, "\n");
4502 remove_phi_node (&gsi, false);
4504 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4505 && TREE_CODE (sprime) == SSA_NAME)
4506 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4508 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4509 sprime = fold_convert (TREE_TYPE (res), sprime);
4510 stmt = gimple_build_assign (res, sprime);
4511 SSA_NAME_DEF_STMT (res) = stmt;
4512 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4514 gsi2 = gsi_after_labels (b);
4515 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4516 /* Queue the copy for eventual removal. */
4517 VEC_safe_push (gimple, heap, to_remove, stmt);
4518 /* If we inserted this PHI node ourselves, it's not an elimination. */
4519 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4520 pre_stats.phis--;
4521 else
4522 pre_stats.eliminations++;
4526 /* We cannot remove stmts during the BB walk, especially not release
4527 SSA names there, as this confuses the VN machinery. The stmts ending
4528 up in to_remove are either stores or simple copies. */
4529 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4531 tree lhs = gimple_assign_lhs (stmt);
4532 tree rhs = gimple_assign_rhs1 (stmt);
4533 use_operand_p use_p;
4534 gimple use_stmt;
4536 /* If there is a single use only, propagate the equivalency
4537 instead of keeping the copy. */
4538 if (TREE_CODE (lhs) == SSA_NAME
4539 && TREE_CODE (rhs) == SSA_NAME
4540 && single_imm_use (lhs, &use_p, &use_stmt)
4541 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4543 SET_USE (use_p, rhs);
4544 update_stmt (use_stmt);
4545 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4546 && TREE_CODE (rhs) == SSA_NAME)
4547 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4550 /* If this is a store or a now unused copy, remove it. */
4551 if (TREE_CODE (lhs) != SSA_NAME
4552 || has_zero_uses (lhs))
4554 basic_block bb = gimple_bb (stmt);
4555 gsi = gsi_for_stmt (stmt);
4556 unlink_stmt_vdef (stmt);
4557 if (gsi_remove (&gsi, true))
4558 bitmap_set_bit (need_eh_cleanup, bb->index);
4559 if (TREE_CODE (lhs) == SSA_NAME)
4560 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4561 release_defs (stmt);
4564 VEC_free (gimple, heap, to_remove);
4566 /* We cannot update call statements with virtual operands during
4567 the SSA walk, as doing so might remove them, which in turn makes
4568 our VN lattice invalid. */
4569 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4570 update_stmt (stmt);
4571 VEC_free (gimple, heap, to_update);
4573 return todo;
4576 /* Borrow a bit of tree-ssa-dce.c for the moment.
4577 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4578 this may be a bit faster, and we may want critical edges kept split. */
4580 /* If OP's defining statement has not already been determined to be necessary,
4581 mark that statement necessary. Return the stmt, if it is newly
4582 necessary. */
4584 static inline gimple
4585 mark_operand_necessary (tree op)
4587 gimple stmt;
4589 gcc_assert (op);
4591 if (TREE_CODE (op) != SSA_NAME)
4592 return NULL;
4594 stmt = SSA_NAME_DEF_STMT (op);
4595 gcc_assert (stmt);
4597 if (gimple_plf (stmt, NECESSARY)
4598 || gimple_nop_p (stmt))
4599 return NULL;
4601 gimple_set_plf (stmt, NECESSARY, true);
4602 return stmt;
4605 /* Because we don't follow the standard PRE algorithm exactly and sometimes
4606 decide not to insert PHI nodes, and because value numbering of casts isn't
4607 perfect, we sometimes end up inserting dead code. This simple DCE-like
4608 pass removes any insertions we made that weren't actually used. */
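/* A minimal example of such dead code (hypothetical names): if
   insert () created

     pretmp_5 = a_1 + b_2;

   anticipating a redundancy that elimination then never exploited,
   pretmp_5 has no uses, its definition is never marked NECESSARY,
   and it is removed below.  */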
4610 static void
4611 remove_dead_inserted_code (void)
4613 bitmap worklist;
4614 unsigned i;
4615 bitmap_iterator bi;
4616 gimple t;
4618 worklist = BITMAP_ALLOC (NULL);
4619 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4621 t = SSA_NAME_DEF_STMT (ssa_name (i));
4622 if (gimple_plf (t, NECESSARY))
4623 bitmap_set_bit (worklist, i);
4625 while (!bitmap_empty_p (worklist))
4627 i = bitmap_first_set_bit (worklist);
4628 bitmap_clear_bit (worklist, i);
4629 t = SSA_NAME_DEF_STMT (ssa_name (i));
4631 /* PHI nodes are somewhat special in that each PHI alternative has
4632 data and control dependencies. All the statements feeding the
4633 PHI node's arguments are always necessary. */
4634 if (gimple_code (t) == GIMPLE_PHI)
4636 unsigned k;
4638 for (k = 0; k < gimple_phi_num_args (t); k++)
4640 tree arg = PHI_ARG_DEF (t, k);
4641 if (TREE_CODE (arg) == SSA_NAME)
4643 gimple n = mark_operand_necessary (arg);
4644 if (n)
4645 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4649 else
4651 /* Propagate through the operands. Examine all the USE, VUSE and
4652 VDEF operands in this statement. Mark all the statements
4653 which feed this statement's uses as necessary. */
4654 ssa_op_iter iter;
4655 tree use;
4657 /* The operands of VDEF expressions are also needed as they
4658 represent potential definitions that may reach this
4659 statement (VDEF operands allow us to follow def-def
4660 links). */
4662 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4664 gimple n = mark_operand_necessary (use);
4665 if (n)
4666 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4671 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4673 t = SSA_NAME_DEF_STMT (ssa_name (i));
4674 if (!gimple_plf (t, NECESSARY))
4676 gimple_stmt_iterator gsi;
4678 if (dump_file && (dump_flags & TDF_DETAILS))
4680 fprintf (dump_file, "Removing unnecessary insertion:");
4681 print_gimple_stmt (dump_file, t, 0, 0);
4684 gsi = gsi_for_stmt (t);
4685 if (gimple_code (t) == GIMPLE_PHI)
4686 remove_phi_node (&gsi, true);
4687 else
4689 gsi_remove (&gsi, true);
4690 release_defs (t);
4694 BITMAP_FREE (worklist);
4697 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4698 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4699 the number of visited blocks. */
4701 static int
4702 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4704 edge_iterator *stack;
4705 int sp;
4706 int post_order_num = 0;
4707 sbitmap visited;
4709 if (include_entry_exit)
4710 post_order[post_order_num++] = EXIT_BLOCK;
4712 /* Allocate stack for back-tracking up CFG. */
4713 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4714 sp = 0;
4716 /* Allocate bitmap to track nodes that have been visited. */
4717 visited = sbitmap_alloc (last_basic_block);
4719 /* None of the nodes in the CFG have been visited yet. */
4720 sbitmap_zero (visited);
4722 /* Push an iterator over the exit block's predecessor edges on to the stack. */
4723 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4725 while (sp)
4727 edge_iterator ei;
4728 basic_block src;
4729 basic_block dest;
4731 /* Look at the edge on the top of the stack. */
4732 ei = stack[sp - 1];
4733 src = ei_edge (ei)->src;
4734 dest = ei_edge (ei)->dest;
4736 /* Check if the edge source has been visited yet. */
4737 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4739 /* Mark that we have visited the source. */
4740 SET_BIT (visited, src->index);
4742 if (EDGE_COUNT (src->preds) > 0)
4743 /* Since the SRC node has been visited for the first
4744 time, check its predecessors. */
4745 stack[sp++] = ei_start (src->preds);
4746 else
4747 post_order[post_order_num++] = src->index;
4749 else
4751 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4752 post_order[post_order_num++] = dest->index;
4754 if (!ei_one_before_end_p (ei))
4755 ei_next (&stack[sp - 1]);
4756 else
4757 sp--;
4761 if (include_entry_exit)
4762 post_order[post_order_num++] = ENTRY_BLOCK;
4764 free (stack);
4765 sbitmap_free (visited);
4766 return post_order_num;
4770 /* Initialize data structures used by PRE. */
4772 static void
4773 init_pre (bool do_fre)
4775 basic_block bb;
4777 next_expression_id = 1;
4778 expressions = NULL;
4779 VEC_safe_push (pre_expr, heap, expressions, (pre_expr)NULL);
4780 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4781 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4782 get_max_value_id() + 1);
4783 name_to_id = NULL;
4785 in_fre = do_fre;
4787 inserted_exprs = BITMAP_ALLOC (NULL);
4789 connect_infinite_loops_to_exit ();
4790 memset (&pre_stats, 0, sizeof (pre_stats));
4793 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4794 my_rev_post_order_compute (postorder, false);
4796 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4798 calculate_dominance_info (CDI_POST_DOMINATORS);
4799 calculate_dominance_info (CDI_DOMINATORS);
4801 bitmap_obstack_initialize (&grand_bitmap_obstack);
4802 phi_translate_table.create (5110);
4803 expression_to_id.create (num_ssa_names * 3);
4804 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4805 sizeof (struct bitmap_set), 30);
4806 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4807 sizeof (struct pre_expr_d), 30);
4808 FOR_ALL_BB (bb)
4810 EXP_GEN (bb) = bitmap_set_new ();
4811 PHI_GEN (bb) = bitmap_set_new ();
4812 TMP_GEN (bb) = bitmap_set_new ();
4813 AVAIL_OUT (bb) = bitmap_set_new ();
4816 need_eh_cleanup = BITMAP_ALLOC (NULL);
4817 need_ab_cleanup = BITMAP_ALLOC (NULL);
4821 /* Deallocate data structures used by PRE. */
4823 static void
4824 fini_pre (bool do_fre)
4826 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4827 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4829 free (postorder);
4830 VEC_free (bitmap_set_t, heap, value_expressions);
4831 BITMAP_FREE (inserted_exprs);
4832 bitmap_obstack_release (&grand_bitmap_obstack);
4833 free_alloc_pool (bitmap_set_pool);
4834 free_alloc_pool (pre_expr_pool);
4835 phi_translate_table.dispose ();
4836 expression_to_id.dispose ();
4837 VEC_free (unsigned, heap, name_to_id);
4839 free_aux_for_blocks ();
4841 free_dominance_info (CDI_POST_DOMINATORS);
4843 if (do_eh_cleanup)
4844 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4846 if (do_ab_cleanup)
4847 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4849 BITMAP_FREE (need_eh_cleanup);
4850 BITMAP_FREE (need_ab_cleanup);
4852 if (do_eh_cleanup || do_ab_cleanup)
4853 cleanup_tree_cfg ();
4855 if (!do_fre)
4856 loop_optimizer_finalize ();
4859 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4860 only wants to do full redundancy elimination. */
4862 static unsigned int
4863 execute_pre (bool do_fre)
4865 unsigned int todo = 0;
4867 do_partial_partial =
4868 flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
4870 /* This has to happen before SCCVN runs because
4871 loop_optimizer_init may create new phis, etc. */
4872 if (!do_fre)
4873 loop_optimizer_init (LOOPS_NORMAL);
4875 if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
4877 if (!do_fre)
4878 loop_optimizer_finalize ();
4880 return 0;
4883 init_pre (do_fre);
4884 scev_initialize ();
4886 /* Collect and value number expressions computed in each basic block. */
4887 compute_avail ();
4889 if (dump_file && (dump_flags & TDF_DETAILS))
4891 basic_block bb;
4893 FOR_ALL_BB (bb)
4895 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4896 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4897 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4898 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4902 /* Insert can get quite slow on an incredibly large number of basic
4903 blocks due to some quadratic behavior. Until this behavior is
4904 fixed, don't run it when we have an incredibly large number of
4905 bb's. If we aren't going to run insert, there is no point in
4906 computing ANTIC, either, even though it's plenty fast. */
4907 if (!do_fre && n_basic_blocks < 4000)
4909 compute_antic ();
4910 insert ();
4913 /* Remove fake edges before committing our inserts, so that we
4914 don't end up with extra critical edges that we would need
4915 to split. */
4916 remove_fake_exit_edges ();
4917 gsi_commit_edge_inserts ();
4919 /* Remove all the redundant expressions. */
4920 todo |= eliminate ();
4922 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4923 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4924 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4925 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4926 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4928 clear_expression_ids ();
4929 if (!do_fre)
4931 remove_dead_inserted_code ();
4932 todo |= TODO_verify_flow;
4935 scev_finalize ();
4936 fini_pre (do_fre);
4938 if (!do_fre)
4939 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4940 case we can merge the block with the remaining predecessor of the block.
4941 It should either:
4942 - call merge_blocks after each tail merge iteration
4943 - call merge_blocks after all tail merge iterations
4944 - mark TODO_cleanup_cfg when necessary
4945 - share the cfg cleanup with fini_pre. */
4946 todo |= tail_merge_optimize (todo);
4947 free_scc_vn ();
4949 return todo;
4952 /* Gate and execute functions for PRE. */
4954 static unsigned int
4955 do_pre (void)
4957 return execute_pre (false);
4960 static bool
4961 gate_pre (void)
4963 return flag_tree_pre != 0;
4966 struct gimple_opt_pass pass_pre =
4969 GIMPLE_PASS,
4970 "pre", /* name */
4971 gate_pre, /* gate */
4972 do_pre, /* execute */
4973 NULL, /* sub */
4974 NULL, /* next */
4975 0, /* static_pass_number */
4976 TV_TREE_PRE, /* tv_id */
4977 PROP_no_crit_edges | PROP_cfg
4978 | PROP_ssa, /* properties_required */
4979 0, /* properties_provided */
4980 0, /* properties_destroyed */
4981 TODO_rebuild_alias, /* todo_flags_start */
4982 TODO_update_ssa_only_virtuals | TODO_ggc_collect
4983 | TODO_verify_ssa /* todo_flags_finish */
4988 /* Gate and execute functions for FRE. */
4990 static unsigned int
4991 execute_fre (void)
4993 return execute_pre (true);
4996 static bool
4997 gate_fre (void)
4999 return flag_tree_fre != 0;
5002 struct gimple_opt_pass pass_fre =
5005 GIMPLE_PASS,
5006 "fre", /* name */
5007 gate_fre, /* gate */
5008 execute_fre, /* execute */
5009 NULL, /* sub */
5010 NULL, /* next */
5011 0, /* static_pass_number */
5012 TV_TREE_FRE, /* tv_id */
5013 PROP_cfg | PROP_ssa, /* properties_required */
5014 0, /* properties_provided */
5015 0, /* properties_destroyed */
5016 0, /* todo_flags_start */
5017 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */