/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety (availability)
   of the expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
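/* Purely illustrative sketch of the above pipeline on a hypothetical
   diamond CFG (all names, blocks and value numbers below are made up,
   not from any real testcase):

	bb2: x_1 = a_2 + b_3;	bb3: <no computation of a_2 + b_3>
		   \			/
		    bb4: y_4 = a_2 + b_3;

   a_2 + b_3 is in AVAIL_OUT (bb2) but not in AVAIL_OUT (bb3), and it
   is in ANTIC_IN (bb4), so it is partially redundant.  Insertion adds
   pretmp_5 = a_2 + b_3 at the end of bb3 and creates
   pretmp_6 = PHI <x_1(bb2), pretmp_5(bb3)> in bb4; elimination then
   rewrites y_4 = a_2 + b_3 into y_4 = pretmp_6.  */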
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
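/* As a hypothetical illustration of the above: if SCCVN proved x_1 and
   y_2 equal, both share one value number (with representative, say,
   x_1) and one small value_id, say 7.  The bitmap sets below can then
   track "value 7" directly, without materializing any additional
   SSA_NAME for it.  */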
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
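/* Illustrative example (hypothetical ids): a set holding x_1
   (value_id 7, expression-id 3) and a_2 + b_3 (value_id 9,
   expression-id 12) is stored as values = {7, 9} and
   expressions = {3, 12}.  Producing topological order walks the
   values bitmap in increasing value_id order and uses the reverse
   mapping value_expressions[id] to pick out the members that are
   present in the expressions bitmap.  */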
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d T;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const struct pre_expr_d *e1, const struct pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const struct pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static hash_table <pre_expr_d> expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      unsigned old_len = VEC_length (unsigned, name_to_id);
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the existing expression id for EXPR, or 0 if it has none.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
	return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

static bool in_fre = false;
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
					 gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d T;
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
			  const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table <expr_pred_trans_d> phi_translate_table;
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e), pred->index);
  slot = phi_translate_table.find_slot_with_hash (&ept, ept.hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL;
  else
    return (*slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e),
						 pred->index);

  slot = phi_translate_table.find_slot_with_hash (new_pair,
						  new_pair->hashcode, INSERT);
  free (*slot);
  *slot = new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			     v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
	unsigned int id;
	id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	if (id == 0)
	  {
	    id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	    add_to_value (id, expr);
	  }
	return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array from bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
	}
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  if (!in_fre)
    {
      print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
      print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
      print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
      print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
      if (do_partial_partial)
	print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
      print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
    }
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
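/* A hypothetical example of the above: for the NARY {plus_expr, x_1, 1}
   where value numbering recorded the constant 4 for x_1's value, we
   substitute 4, fold 4 + 1 and return the CONSTANT pre_expr for 5.
   If folding instead produces something that is not a gimple
   min_invariant (say an SSA_NAME from simplifying x_1 * 1), the NARY
   is returned unchanged.  */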
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &visited);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
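/* Hypothetical example of the above (made-up virtual operands): for a
   load with VUSE .MEM_5 where PHIBLOCK contains
   .MEM_5 = PHI <.MEM_3 (BLOCK), .MEM_4 (other)>, the translated vuse
   is .MEM_3.  If .MEM_5 is instead defined by a store in PHIBLOCK that
   does not clobber the reference, we keep walking the vuse chain
   upwards until we leave PHIBLOCK or reach such a PHI.  */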
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary->op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary->length,
						 newnary->opcode,
						 newnary->type,
						 &newnary->op[0],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false, same_valid = true;
	unsigned int i, j, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		if (!leader)
		  break;
		/* Make sure we do not recursively translate ourselves
		   like for translating a[n_1] with the leader for
		   n_1 being a[n_1].  */
		if (get_expression_id (leader) != get_expression_id (expr))
		  {
		    opresult = phi_translate (leader, set1, set2,
					      pred, phiblock);
		    if (!opresult)
		      break;
		    if (opresult != leader)
		      {
			tree name = get_representative_for (opresult);
			if (!name)
			  break;
			changed |= name != op[n];
			op[n] = name;
		      }
		  }
	      }
	    if (n != 3)
	      {
		if (newoperands)
		  VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    /* If it transforms a non-constant ARRAY_REF into a constant
	       one, adjust the constant offset.  */
	    if (newop.opcode == ARRAY_REF
		&& newop.off == -1
		&& TREE_CODE (op[0]) == INTEGER_CST
		&& TREE_CODE (op[1]) == INTEGER_CST
		&& TREE_CODE (op[2]) == INTEGER_CST)
	      {
		double_int off = tree_to_double_int (op[0]);
		off = double_int_add (off,
				      double_int_neg
				        (tree_to_double_int (op[1])));
		off = double_int_mul (off, tree_to_double_int (op[2]));
		if (double_int_fits_in_shwi_p (off))
		  newop.off = off.low;
	      }
	    VEC_replace (vn_reference_op_s, newoperands, j, newop);
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
		&& VEC_index (vn_reference_op_s,
			      newoperands, j - 1).opcode == MEM_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, VN_WALK);
	    if (result)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id() + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	return expr;
      }
      break;
    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
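/* Hypothetical example of phi translation (made-up names): given
   x_3 = PHI <x_1 (PRED), x_2 (other)> in PHIBLOCK, translating the
   NARY {plus_expr, x_3, y_4} through the edge from PRED replaces x_3
   by x_1 and looks up (or inserts) {plus_expr, x_1, y_4} in the VN
   tables, so that ANTIC_IN (PRED) can contain the expression in terms
   of values that have leaders in PRED.  */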
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
	return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Nor do we add
     translations of NAMEs, as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
				&set->expressions, 0, i, bi)
	{
	  pre_expr val = expression_for_id (i);
	  /* At the point where stmt is not null, there should always
	     be an SSA_NAME first in the list of expressions.  */
	  if (stmt)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
	      if (gimple_code (def_stmt) != GIMPLE_PHI
		  && gimple_bb (def_stmt) == gimple_bb (stmt)
		  /* PRE insertions are at the end of the basic-block
		     and have UID 0.  */
		  && (gimple_uid (def_stmt) == 0
		      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
		continue;
	    }
	  return val;
	}
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
					     refx->operands))
	{
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
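/* Note the EXPR_DIES cache encoding used above: for expression-id ID,
   bit 2*ID records that the query has been computed for this block at
   all, and bit 2*ID + 1 records the answer.  A clear bit 2*ID thus
   means "not cached yet", not "does not die".  */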
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
	    || (set2 && bitmap_set_contains_value (set2, value_id))))
	return false;
    }
  return true;
}

/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
	       basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (i = 0; i < nary->length; i++)
	  if (!op_valid_in_sets (set1, set2, nary->op[i]))
	    return false;
	return true;
      }
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vn_reference_op_t vro;
	unsigned int i;

	FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
	  {
	    if (!op_valid_in_sets (set1, set2, vro->op0)
		|| !op_valid_in_sets (set1, set2, vro->op1)
		|| !op_valid_in_sets (set1, set2, vro->op2))
	      return false;
	  }
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
2042 /* Clean the set of expressions that are no longer valid in SET1 or
2043 SET2. This means expressions that are made up of values we have no
2044 leaders for in SET1 or SET2. This version is used for partial
2045 anticipation, where an expression need not be valid in ANTIC_IN or
2046 PA_IN alone, only in their union. */
2048 static void
2049 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2051 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2052 pre_expr expr;
2053 int i;
2055 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2057 if (!valid_in_sets (set1, set2, expr, block))
2058 bitmap_remove_from_set (set1, expr);
2060 VEC_free (pre_expr, heap, exprs);
2063 /* Clean the set of expressions that are no longer valid in SET. This
2064 means expressions that are made up of values we have no leaders for
2065 in SET. */
2067 static void
2068 clean (bitmap_set_t set, basic_block block)
2070 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2071 pre_expr expr;
2072 int i;
2074 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2076 if (!valid_in_sets (set, NULL, expr, block))
2077 bitmap_remove_from_set (set, expr);
2079 VEC_free (pre_expr, heap, exprs);
2082 /* Clean the set of expressions that are no longer valid in SET because
2083 they are clobbered in BLOCK or because they trap and may not be executed. */
2085 static void
2086 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2088 bitmap_iterator bi;
2089 unsigned i;
2091 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2093 pre_expr expr = expression_for_id (i);
2094 if (expr->kind == REFERENCE)
2096 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2097 if (ref->vuse)
2099 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2100 if (!gimple_nop_p (def_stmt)
2101 && ((gimple_bb (def_stmt) != block
2102 && !dominated_by_p (CDI_DOMINATORS,
2103 block, gimple_bb (def_stmt)))
2104 || (gimple_bb (def_stmt) == block
2105 && value_dies_in_block_x (expr, block))))
2106 bitmap_remove_from_set (set, expr);
2109 else if (expr->kind == NARY)
2111 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2112 /* If the NARY may trap make sure the block does not contain
2113 a possible exit point.
2114 ??? This is overly conservative if we translate AVAIL_OUT
2115 as the available expression might be after the exit point. */
2116 if (BB_MAY_NOTRETURN (block)
2117 && vn_nary_may_trap (nary))
2118 bitmap_remove_from_set (set, expr);
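/* For illustration (hypothetical SSA names, not from this file): given

     BLOCK:  *p_1 = x_3;	<- may clobber *q_2's location
     SUCC:   tem_4 = *q_2;

   the REFERENCE for *q_2 is in ANTIC_IN (SUCC) and hence reaches
   ANTIC_OUT (BLOCK), but it is not anticipatable at the top of BLOCK
   because the store may kill it; value_dies_in_block_x detects this
   and the expression is pruned here.  */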
2123 static sbitmap has_abnormal_preds;
2125 /* List of blocks that may have changed during ANTIC computation and
2126 thus need to be iterated over. */
2128 static sbitmap changed_blocks;
2130 /* Decide whether to defer a block for a later iteration, or PHI
2131 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2132 should defer the block, and true if we processed it. */
2134 static bool
2135 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2136 basic_block block, basic_block phiblock)
2138 if (!BB_VISITED (phiblock))
2140 SET_BIT (changed_blocks, block->index);
2141 BB_VISITED (block) = 0;
2142 BB_DEFERRED (block) = 1;
2143 return false;
2145 else
2146 phi_translate_set (dest, source, block, phiblock);
2147 return true;
2150 /* Compute the ANTIC set for BLOCK.
2152 If succs(BLOCK) > 1 then
2153 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2154 else if succs(BLOCK) == 1 then
2155 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2157 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2160 static bool
2161 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2163 bool changed = false;
2164 bitmap_set_t S, old, ANTIC_OUT;
2165 bitmap_iterator bi;
2166 unsigned int bii;
2167 edge e;
2168 edge_iterator ei;
2170 old = ANTIC_OUT = S = NULL;
2171 BB_VISITED (block) = 1;
2173 /* If any edges from predecessors are abnormal, antic_in is empty,
2174 so do nothing. */
2175 if (block_has_abnormal_pred_edge)
2176 goto maybe_dump_sets;
2178 old = ANTIC_IN (block);
2179 ANTIC_OUT = bitmap_set_new ();
2181 /* If the block has no successors, ANTIC_OUT is empty. */
2182 if (EDGE_COUNT (block->succs) == 0)
2184 /* If we have one successor, we could have some phi nodes to
2185 translate through. */
2186 else if (single_succ_p (block))
2188 basic_block succ_bb = single_succ (block);
2190 /* We trade iterations of the dataflow equations for having to
2191 phi translate the maximal set, which is incredibly slow
2192 (since the maximal set often has 300+ members, even when you
2193 have a small number of blocks).
2194 Basically, we defer the computation of ANTIC for this block
2195 until we have processed its successor, which will inevitably
2196 have a *much* smaller set of values to phi translate once
2197 clean has been run on it.
2198 The cost of doing this is that we technically perform more
2199 iterations; however, they are lower-cost iterations.
2201 Timings for PRE on tramp3d-v4:
2202 without maximal set fix: 11 seconds
2203 with maximal set fix/without deferring: 26 seconds
2204 with maximal set fix/with deferring: 11 seconds
2207 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2208 block, succ_bb))
2210 changed = true;
2211 goto maybe_dump_sets;
2214 /* If we have multiple successors, we take the intersection of all of
2215 them. Note that in the case of loop exit phi nodes, we may have
2216 phis to translate through. */
2217 else
2219 VEC(basic_block, heap) * worklist;
2220 size_t i;
2221 basic_block bprime, first = NULL;
2223 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2224 FOR_EACH_EDGE (e, ei, block->succs)
2226 if (!first
2227 && BB_VISITED (e->dest))
2228 first = e->dest;
2229 else if (BB_VISITED (e->dest))
2230 VEC_quick_push (basic_block, worklist, e->dest);
2233 /* With multiple successors we must already have visited at least one. */
2234 if (!first)
2236 SET_BIT (changed_blocks, block->index);
2237 BB_VISITED (block) = 0;
2238 BB_DEFERRED (block) = 1;
2239 changed = true;
2240 VEC_free (basic_block, heap, worklist);
2241 goto maybe_dump_sets;
2244 if (!gimple_seq_empty_p (phi_nodes (first)))
2245 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2246 else
2247 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2249 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2251 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2253 bitmap_set_t tmp = bitmap_set_new ();
2254 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2255 bitmap_set_and (ANTIC_OUT, tmp);
2256 bitmap_set_free (tmp);
2258 else
2259 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2261 VEC_free (basic_block, heap, worklist);
2264 /* Prune expressions that are clobbered in block and thus become
2265 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2266 prune_clobbered_mems (ANTIC_OUT, block);
2268 /* Generate ANTIC_OUT - TMP_GEN. */
2269 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2271 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2272 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2273 TMP_GEN (block));
2275 /* Then union in the ANTIC_OUT - TMP_GEN values,
2276 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2277 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2278 bitmap_value_insert_into_set (ANTIC_IN (block),
2279 expression_for_id (bii));
2281 clean (ANTIC_IN (block), block);
2283 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2285 changed = true;
2286 SET_BIT (changed_blocks, block->index);
2287 FOR_EACH_EDGE (e, ei, block->preds)
2288 SET_BIT (changed_blocks, e->src->index);
2290 else
2291 RESET_BIT (changed_blocks, block->index);
2293 maybe_dump_sets:
2294 if (dump_file && (dump_flags & TDF_DETAILS))
2296 if (!BB_DEFERRED (block) || BB_VISITED (block))
2298 if (ANTIC_OUT)
2299 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2301 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2302 block->index);
2304 if (S)
2305 print_bitmap_set (dump_file, S, "S", block->index);
2307 else
2309 fprintf (dump_file,
2310 "Block %d was deferred for a future iteration.\n",
2311 block->index);
2314 if (old)
2315 bitmap_set_free (old);
2316 if (S)
2317 bitmap_set_free (S);
2318 if (ANTIC_OUT)
2319 bitmap_set_free (ANTIC_OUT);
2320 return changed;
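/* A minimal sketch of the local step above, using plain word-sized bit
   sets in place of bitmap_set_t (purely illustrative; the real sets
   track values and expressions separately):
     ANTIC_IN = clean (ANTIC_OUT U EXP_GEN - TMP_GEN).  */
typedef unsigned long bbset;

static bbset
antic_in_local (bbset antic_out, bbset exp_gen, bbset tmp_gen, bbset valid)
{
  bbset s = antic_out & ~tmp_gen;	/* S = ANTIC_OUT - TMP_GEN.  */
  bbset in = (exp_gen & ~tmp_gen) | s;	/* Union in EXP_GEN - TMP_GEN.  */
  return in & valid;			/* clean (): drop exprs without leaders.  */
}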
2323 /* Compute PARTIAL_ANTIC for BLOCK.
2325 If succs(BLOCK) > 1 then
2326 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2327 in ANTIC_OUT for all succ(BLOCK)
2328 else if succs(BLOCK) == 1 then
2329 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2331 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2332 - ANTIC_IN[BLOCK])
2335 static bool
2336 compute_partial_antic_aux (basic_block block,
2337 bool block_has_abnormal_pred_edge)
2339 bool changed = false;
2340 bitmap_set_t old_PA_IN;
2341 bitmap_set_t PA_OUT;
2342 edge e;
2343 edge_iterator ei;
2344 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2346 old_PA_IN = PA_OUT = NULL;
2348 /* If any edges from predecessors are abnormal, antic_in is empty,
2349 so do nothing. */
2350 if (block_has_abnormal_pred_edge)
2351 goto maybe_dump_sets;
2353 /* If there are too many partially anticipatable values in the
2354 block, phi_translate_set can take exponential time: stop
2355 before the translation starts. */
2356 if (max_pa
2357 && single_succ_p (block)
2358 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2359 goto maybe_dump_sets;
2361 old_PA_IN = PA_IN (block);
2362 PA_OUT = bitmap_set_new ();
2364 /* If the block has no successors, PA_OUT is empty. */
2365 if (EDGE_COUNT (block->succs) == 0)
2367 /* If we have one successor, we could have some phi nodes to
2368 translate through. Note that we can't phi translate across DFS
2369 back edges in partial antic, because it uses a union operation on
2370 the successors. For recurrences like IV's, we will end up
2371 generating a new value in the set on each go around (i + 3 (VH.1),
2372 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2373 else if (single_succ_p (block))
2375 basic_block succ = single_succ (block);
2376 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2377 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2379 /* If we have multiple successors, we take the union of all of
2380 them. */
2381 else
2383 VEC(basic_block, heap) * worklist;
2384 size_t i;
2385 basic_block bprime;
2387 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2388 FOR_EACH_EDGE (e, ei, block->succs)
2390 if (e->flags & EDGE_DFS_BACK)
2391 continue;
2392 VEC_quick_push (basic_block, worklist, e->dest);
2394 if (VEC_length (basic_block, worklist) > 0)
2396 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2398 unsigned int i;
2399 bitmap_iterator bi;
2401 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2402 bitmap_value_insert_into_set (PA_OUT,
2403 expression_for_id (i));
2404 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2406 bitmap_set_t pa_in = bitmap_set_new ();
2407 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2408 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2409 bitmap_value_insert_into_set (PA_OUT,
2410 expression_for_id (i));
2411 bitmap_set_free (pa_in);
2413 else
2414 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2415 bitmap_value_insert_into_set (PA_OUT,
2416 expression_for_id (i));
2419 VEC_free (basic_block, heap, worklist);
2422 /* Prune expressions that are clobbered in block and thus become
2423 invalid if translated from PA_OUT to PA_IN. */
2424 prune_clobbered_mems (PA_OUT, block);
2426 /* PA_IN starts with PA_OUT - TMP_GEN.
2427 Then we subtract things from ANTIC_IN. */
2428 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2430 /* For partial antic, we want to put back in the phi results, since
2431 we will properly avoid making them partially antic over backedges. */
2432 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2433 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2435 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2436 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2438 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2440 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2442 changed = true;
2443 SET_BIT (changed_blocks, block->index);
2444 FOR_EACH_EDGE (e, ei, block->preds)
2445 SET_BIT (changed_blocks, e->src->index);
2447 else
2448 RESET_BIT (changed_blocks, block->index);
2450 maybe_dump_sets:
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2453 if (PA_OUT)
2454 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2456 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2458 if (old_PA_IN)
2459 bitmap_set_free (old_PA_IN);
2460 if (PA_OUT)
2461 bitmap_set_free (PA_OUT);
2462 return changed;
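/* The corresponding sketch for the partial-antic step above (reusing
   the bbset typedef from the earlier sketch; the re-adding of phi
   results is omitted for brevity):
     PA_IN = dependent_clean (PA_OUT - TMP_GEN - ANTIC_IN).  */
static bbset
pa_in_local (bbset pa_out, bbset tmp_gen, bbset antic_in, bbset valid)
{
  bbset in = pa_out & ~tmp_gen;	/* PA_OUT - TMP_GEN.  */
  in &= ~antic_in;		/* Minus what is already fully antic.  */
  return in & valid;		/* dependent_clean ().  */
}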
2465 /* Compute ANTIC and partial ANTIC sets. */
2467 static void
2468 compute_antic (void)
2470 bool changed = true;
2471 int num_iterations = 0;
2472 basic_block block;
2473 int i;
2475 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2476 We pre-build the map of blocks with incoming abnormal edges here. */
2477 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2478 sbitmap_zero (has_abnormal_preds);
2480 FOR_EACH_BB (block)
2482 edge_iterator ei;
2483 edge e;
2485 FOR_EACH_EDGE (e, ei, block->preds)
2487 e->flags &= ~EDGE_DFS_BACK;
2488 if (e->flags & EDGE_ABNORMAL)
2490 SET_BIT (has_abnormal_preds, block->index);
2491 break;
2495 BB_VISITED (block) = 0;
2496 BB_DEFERRED (block) = 0;
2498 /* While we are here, give empty ANTIC_IN sets to each block. */
2499 ANTIC_IN (block) = bitmap_set_new ();
2500 PA_IN (block) = bitmap_set_new ();
2503 /* At the exit block we anticipate nothing. */
2504 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2505 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2506 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2508 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2509 sbitmap_ones (changed_blocks);
2510 while (changed)
2512 if (dump_file && (dump_flags & TDF_DETAILS))
2513 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2514 /* ??? We need to clear our PHI translation cache here as the
2515 ANTIC sets shrink and we restrict valid translations to
2516 those having operands with leaders in ANTIC. Same below
2517 for PA ANTIC computation. */
2518 num_iterations++;
2519 changed = false;
2520 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2522 if (TEST_BIT (changed_blocks, postorder[i]))
2524 basic_block block = BASIC_BLOCK (postorder[i]);
2525 changed |= compute_antic_aux (block,
2526 TEST_BIT (has_abnormal_preds,
2527 block->index));
2530 /* Theoretically possible, but *highly* unlikely. */
2531 gcc_checking_assert (num_iterations < 500);
2534 statistics_histogram_event (cfun, "compute_antic iterations",
2535 num_iterations);
2537 if (do_partial_partial)
2539 sbitmap_ones (changed_blocks);
2540 mark_dfs_back_edges ();
2541 num_iterations = 0;
2542 changed = true;
2543 while (changed)
2545 if (dump_file && (dump_flags & TDF_DETAILS))
2546 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2547 num_iterations++;
2548 changed = false;
2549 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2551 if (TEST_BIT (changed_blocks, postorder[i]))
2553 basic_block block = BASIC_BLOCK (postorder[i]);
2554 changed
2555 |= compute_partial_antic_aux (block,
2556 TEST_BIT (has_abnormal_preds,
2557 block->index));
2560 /* Theoretically possible, but *highly* unlikely. */
2561 gcc_checking_assert (num_iterations < 500);
2563 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2564 num_iterations);
2566 sbitmap_free (has_abnormal_preds);
2567 sbitmap_free (changed_blocks);
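/* A standalone sketch (hypothetical helpers, plain bool arrays in place
   of sbitmaps) of the chaotic-iteration pattern used twice above: visit
   blocks in reverse postorder, reprocessing only those whose sets may
   have changed.  PROCESS is expected to update CHANGED_BLOCKS for the
   block and its predecessors, as compute_antic_aux does.  */
static void
iterate_to_fixpoint (int nblocks, const int *postorder,
		     bool *changed_blocks, bool (*process) (int))
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (int i = nblocks - 1; i >= 0; i--)
	if (changed_blocks[postorder[i]])
	  changed |= process (postorder[i]);
    }
}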
2570 /* Return true if OP is a tree which we can perform PRE on.
2571 This may not match the operations we can value number, but in
2572 a perfect world would. */
2574 static bool
2575 can_PRE_operation (tree op)
2577 return UNARY_CLASS_P (op)
2578 || BINARY_CLASS_P (op)
2579 || COMPARISON_CLASS_P (op)
2580 || TREE_CODE (op) == MEM_REF
2581 || TREE_CODE (op) == COMPONENT_REF
2582 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2583 || TREE_CODE (op) == CALL_EXPR
2584 || TREE_CODE (op) == ARRAY_REF;
2588 /* Inserted expressions are placed onto this worklist, which is used
2589 for performing quick dead code elimination of insertions we made
2590 that didn't turn out to be necessary. */
2591 static bitmap inserted_exprs;
2593 /* The actual worker for create_component_ref_by_pieces. */
2595 static tree
2596 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2597 unsigned int *operand, gimple_seq *stmts,
2598 gimple domstmt)
2600 vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
2601 *operand);
2602 tree genop;
2603 ++*operand;
2604 switch (currop->opcode)
2606 case CALL_EXPR:
2608 tree folded, sc = NULL_TREE;
2609 unsigned int nargs = 0;
2610 tree fn, *args;
2611 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2612 fn = currop->op0;
2613 else
2615 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2616 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2617 if (!fn)
2618 return NULL_TREE;
2620 if (currop->op1)
2622 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2623 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2624 if (!sc)
2625 return NULL_TREE;
2627 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2628 ref->operands) - 1);
2629 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2631 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2632 operand, stmts,
2633 domstmt);
2634 if (!args[nargs])
2636 free (args);
2637 return NULL_TREE;
2639 nargs++;
2641 folded = build_call_array (currop->type,
2642 (TREE_CODE (fn) == FUNCTION_DECL
2643 ? build_fold_addr_expr (fn) : fn),
2644 nargs, args);
2645 free (args);
2646 if (sc)
2647 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2648 return folded;
2651 case MEM_REF:
2653 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2654 stmts, domstmt);
2655 tree offset = currop->op0;
2656 if (!baseop)
2657 return NULL_TREE;
2658 if (TREE_CODE (baseop) == ADDR_EXPR
2659 && handled_component_p (TREE_OPERAND (baseop, 0)))
2661 HOST_WIDE_INT off;
2662 tree base;
2663 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2664 &off);
2665 gcc_assert (base);
2666 offset = int_const_binop (PLUS_EXPR, offset,
2667 build_int_cst (TREE_TYPE (offset),
2668 off));
2669 baseop = build_fold_addr_expr (base);
2671 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2674 case TARGET_MEM_REF:
2676 pre_expr op0expr, op1expr;
2677 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2678 vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
2679 ++*operand);
2680 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2681 stmts, domstmt);
2682 if (!baseop)
2683 return NULL_TREE;
2684 if (currop->op0)
2686 op0expr = get_or_alloc_expr_for (currop->op0);
2687 genop0 = find_or_generate_expression (block, op0expr,
2688 stmts, domstmt);
2689 if (!genop0)
2690 return NULL_TREE;
2692 if (nextop->op0)
2694 op1expr = get_or_alloc_expr_for (nextop->op0);
2695 genop1 = find_or_generate_expression (block, op1expr,
2696 stmts, domstmt);
2697 if (!genop1)
2698 return NULL_TREE;
2700 return build5 (TARGET_MEM_REF, currop->type,
2701 baseop, currop->op2, genop0, currop->op1, genop1);
2704 case ADDR_EXPR:
2705 if (currop->op0)
2707 gcc_assert (is_gimple_min_invariant (currop->op0));
2708 return currop->op0;
2710 /* Fallthrough. */
2711 case REALPART_EXPR:
2712 case IMAGPART_EXPR:
2713 case VIEW_CONVERT_EXPR:
2715 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2716 operand,
2717 stmts, domstmt);
2718 if (!genop0)
2719 return NULL_TREE;
2721 return fold_build1 (currop->opcode, currop->type, genop0);
2724 case WITH_SIZE_EXPR:
2726 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2727 stmts, domstmt);
2728 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2729 tree genop1;
2731 if (!genop0)
2732 return NULL_TREE;
2734 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2735 if (!genop1)
2736 return NULL_TREE;
2738 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2741 case BIT_FIELD_REF:
2743 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2744 stmts, domstmt);
2745 tree op1 = currop->op0;
2746 tree op2 = currop->op1;
2748 if (!genop0)
2749 return NULL_TREE;
2751 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2754 /* For array ref vn_reference_op's, operand 1 of the array ref
2755 is op0 of the reference op and operand 3 of the array ref is
2756 op1. */
2757 case ARRAY_RANGE_REF:
2758 case ARRAY_REF:
2760 tree genop0;
2761 tree genop1 = currop->op0;
2762 pre_expr op1expr;
2763 tree genop2 = currop->op1;
2764 pre_expr op2expr;
2765 tree genop3 = currop->op2;
2766 pre_expr op3expr;
2767 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2768 stmts, domstmt);
2769 if (!genop0)
2770 return NULL_TREE;
2771 op1expr = get_or_alloc_expr_for (genop1);
2772 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2773 if (!genop1)
2774 return NULL_TREE;
2775 if (genop2)
2777 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2778 /* Drop zero minimum index if redundant. */
2779 if (integer_zerop (genop2)
2780 && (!domain_type
2781 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2782 genop2 = NULL_TREE;
2783 else
2785 op2expr = get_or_alloc_expr_for (genop2);
2786 genop2 = find_or_generate_expression (block, op2expr, stmts,
2787 domstmt);
2788 if (!genop2)
2789 return NULL_TREE;
2792 if (genop3)
2794 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2795 /* We can't always put a size in units of the element alignment
2796 here as the element alignment may not be visible. See
2797 PR43783. Simply drop the element size for constant
2798 sizes. */
2799 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2800 genop3 = NULL_TREE;
2801 else
2803 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2804 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2805 op3expr = get_or_alloc_expr_for (genop3);
2806 genop3 = find_or_generate_expression (block, op3expr, stmts,
2807 domstmt);
2808 if (!genop3)
2809 return NULL_TREE;
2812 return build4 (currop->opcode, currop->type, genop0, genop1,
2813 genop2, genop3);
2815 case COMPONENT_REF:
2817 tree op0;
2818 tree op1;
2819 tree genop2 = currop->op1;
2820 pre_expr op2expr;
2821 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2822 stmts, domstmt);
2823 if (!op0)
2824 return NULL_TREE;
2825 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented
2826 by themselves. */
2827 op1 = currop->op0;
2828 if (genop2)
2830 op2expr = get_or_alloc_expr_for (genop2);
2831 genop2 = find_or_generate_expression (block, op2expr, stmts,
2832 domstmt);
2833 if (!genop2)
2834 return NULL_TREE;
2837 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2840 case SSA_NAME:
2842 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2843 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2844 return genop;
2846 case STRING_CST:
2847 case INTEGER_CST:
2848 case COMPLEX_CST:
2849 case VECTOR_CST:
2850 case REAL_CST:
2851 case CONSTRUCTOR:
2852 case VAR_DECL:
2853 case PARM_DECL:
2854 case CONST_DECL:
2855 case RESULT_DECL:
2856 case FUNCTION_DECL:
2857 return currop->op0;
2859 default:
2860 gcc_unreachable ();
2864 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2865 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2866 trying to rename aggregates into SSA form directly, which is not allowed.
2868 Thus, this routine doesn't create temporaries; it just builds a
2869 single access expression for the array, calling
2870 find_or_generate_expression to build the innermost pieces.
2872 This function is a subroutine of create_expression_by_pieces, and
2873 should not be called on its own unless you really know what you
2874 are doing. */
2876 static tree
2877 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2878 gimple_seq *stmts, gimple domstmt)
2880 unsigned int op = 0;
2881 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
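/* A standalone sketch (hypothetical types, at most 4 children per
   operand) of the cursor-based recursion in
   create_component_ref_by_pieces_1: the operands of a reference are
   stored flattened in pre-order, so rebuilding consumes one operand,
   then recurses for its children, advancing the shared cursor.  */
struct flat_op { int opcode; int nchildren; };
struct tree_node { int opcode; struct tree_node *child[4]; };

static struct tree_node *
rebuild_ref (const struct flat_op *ops, unsigned *cursor,
	     struct tree_node *pool, unsigned *next)
{
  const struct flat_op *op = &ops[(*cursor)++];	/* Consume one operand.  */
  struct tree_node *n = &pool[(*next)++];
  n->opcode = op->opcode;
  for (int i = 0; i < op->nchildren; i++)	/* Children follow in order.  */
    n->child[i] = rebuild_ref (ops, cursor, pool, next);
  return n;
}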
2884 /* Find a leader for an expression, or generate one using
2885 create_expression_by_pieces if it's ANTIC but
2886 complex.
2887 BLOCK is the basic_block we are looking for leaders in.
2888 EXPR is the expression to find a leader or generate for.
2889 STMTS is the statement list to put the inserted expressions on.
2890 Returns the SSA_NAME of the LHS of the generated expression or the
2891 leader.
2892 DOMSTMT if non-NULL is a statement that should be dominated by
2893 all uses in the generated expression. If DOMSTMT is non-NULL this
2894 routine can fail and return NULL_TREE. Otherwise it will assert
2895 on failure. */
2897 static tree
2898 find_or_generate_expression (basic_block block, pre_expr expr,
2899 gimple_seq *stmts, gimple domstmt)
2901 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2902 get_expr_value_id (expr), domstmt);
2903 tree genop = NULL;
2904 if (leader)
2906 if (leader->kind == NAME)
2907 genop = PRE_EXPR_NAME (leader);
2908 else if (leader->kind == CONSTANT)
2909 genop = PRE_EXPR_CONSTANT (leader);
2912 /* If it's still NULL, it must be a complex expression, so generate
2913 it recursively. Not so if inserting expressions for values generated
2914 by SCCVN. */
2915 if (genop == NULL
2916 && !domstmt)
2918 bitmap_set_t exprset;
2919 unsigned int lookfor = get_expr_value_id (expr);
2920 bool handled = false;
2921 bitmap_iterator bi;
2922 unsigned int i;
2924 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2925 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2927 pre_expr temp = expression_for_id (i);
2928 if (temp->kind != NAME)
2930 handled = true;
2931 genop = create_expression_by_pieces (block, temp, stmts,
2932 domstmt,
2933 get_expr_type (expr));
2934 break;
2937 if (!handled && domstmt)
2938 return NULL_TREE;
2940 gcc_assert (handled);
2942 return genop;
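/* For illustration (hypothetical SSA names): asked for the value of
   {x_1 + y_2} in BLOCK, this returns an existing leader such as t_5
   when AVAIL_OUT (BLOCK) has one; otherwise (and only when DOMSTMT is
   NULL) it picks a non-NAME expression for that value and lets
   create_expression_by_pieces generate it onto STMTS.  */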
2945 #define NECESSARY GF_PLF_1
2947 /* Create an expression in pieces, so that we can handle very complex
2948 expressions that may be ANTIC, but not necessarily GIMPLE.
2949 BLOCK is the basic block the expression will be inserted into,
2950 EXPR is the expression to insert (in value form)
2951 STMTS is a statement list to append the necessary insertions into.
2953 This function will die if we hit some value that shouldn't be
2954 ANTIC but is (i.e. there is no leader for it, or for its components).
2955 This function may also generate expressions that are themselves
2956 partially or fully redundant. Those that are will be either made
2957 fully redundant during the next iteration of insert (for partially
2958 redundant ones), or eliminated by eliminate (for fully redundant
2959 ones).
2961 If DOMSTMT is non-NULL then we make sure that all uses in the
2962 expressions dominate that statement. In this case the function
2963 can return NULL_TREE to signal failure. */
2965 static tree
2966 create_expression_by_pieces (basic_block block, pre_expr expr,
2967 gimple_seq *stmts, gimple domstmt, tree type)
2969 tree name;
2970 tree folded;
2971 gimple_seq forced_stmts = NULL;
2972 unsigned int value_id;
2973 gimple_stmt_iterator gsi;
2974 tree exprtype = type ? type : get_expr_type (expr);
2975 pre_expr nameexpr;
2976 gimple newstmt;
2978 switch (expr->kind)
2980 /* We may hit the NAME/CONSTANT case if we have to convert types
2981 that value numbering saw through. */
2982 case NAME:
2983 folded = PRE_EXPR_NAME (expr);
2984 break;
2985 case CONSTANT:
2986 folded = PRE_EXPR_CONSTANT (expr);
2987 break;
2988 case REFERENCE:
2990 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2991 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
2993 break;
2994 case NARY:
2996 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2997 tree genop[4];
2998 unsigned i;
2999 for (i = 0; i < nary->length; ++i)
3001 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3002 genop[i] = find_or_generate_expression (block, op,
3003 stmts, domstmt);
3004 if (!genop[i])
3005 return NULL_TREE;
3006 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3007 may have conversions stripped. */
3008 if (nary->opcode == POINTER_PLUS_EXPR)
3010 if (i == 0)
3011 genop[i] = fold_convert (nary->type, genop[i]);
3012 else if (i == 1)
3013 genop[i] = convert_to_ptrofftype (genop[i]);
3015 else
3016 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3018 if (nary->opcode == CONSTRUCTOR)
3020 VEC(constructor_elt,gc) *elts = NULL;
3021 for (i = 0; i < nary->length; ++i)
3022 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3023 folded = build_constructor (nary->type, elts);
3025 else
3027 switch (nary->length)
3029 case 1:
3030 folded = fold_build1 (nary->opcode, nary->type,
3031 genop[0]);
3032 break;
3033 case 2:
3034 folded = fold_build2 (nary->opcode, nary->type,
3035 genop[0], genop[1]);
3036 break;
3037 case 3:
3038 folded = fold_build3 (nary->opcode, nary->type,
3039 genop[0], genop[1], genop[2]);
3040 break;
3041 default:
3042 gcc_unreachable ();
3046 break;
3047 default:
3048 return NULL_TREE;
3051 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3052 folded = fold_convert (exprtype, folded);
3054 /* Force the generated expression to be a sequence of GIMPLE
3055 statements.
3056 We have to call unshare_expr because force_gimple_operand may
3057 modify the tree we pass to it. */
3058 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3059 false, NULL);
3061 /* If we have any intermediate expressions to the value sets, add them
3062 to the value sets and chain them in the instruction stream. */
3063 if (forced_stmts)
3065 gsi = gsi_start (forced_stmts);
3066 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3068 gimple stmt = gsi_stmt (gsi);
3069 tree forcedname = gimple_get_lhs (stmt);
3070 pre_expr nameexpr;
3072 if (TREE_CODE (forcedname) == SSA_NAME)
3074 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3075 VN_INFO_GET (forcedname)->valnum = forcedname;
3076 VN_INFO (forcedname)->value_id = get_next_value_id ();
3077 nameexpr = get_or_alloc_expr_for_name (forcedname);
3078 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3079 if (!in_fre)
3080 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3081 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3084 gimple_seq_add_seq (stmts, forced_stmts);
3087 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
3088 newstmt = gimple_build_assign (name, folded);
3089 gimple_set_plf (newstmt, NECESSARY, false);
3091 gimple_seq_add_stmt (stmts, newstmt);
3092 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3094 /* Fold the last statement. */
3095 gsi = gsi_last (*stmts);
3096 if (fold_stmt_inplace (&gsi))
3097 update_stmt (gsi_stmt (gsi));
3099 /* Add a value number to the temporary.
3100 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3101 we are creating the expression by pieces, and this particular piece of
3102 the expression may have been represented. There is no harm in replacing
3103 here. */
3104 VN_INFO_GET (name)->valnum = name;
3105 value_id = get_expr_value_id (expr);
3106 VN_INFO (name)->value_id = value_id;
3107 nameexpr = get_or_alloc_expr_for_name (name);
3108 add_to_value (value_id, nameexpr);
3109 if (NEW_SETS (block))
3110 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3111 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3113 pre_stats.insertions++;
3114 if (dump_file && (dump_flags & TDF_DETAILS))
3116 fprintf (dump_file, "Inserted ");
3117 print_gimple_stmt (dump_file, newstmt, 0, 0);
3118 fprintf (dump_file, " in predecessor %d\n", block->index);
3121 return name;
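/* For illustration (hypothetical input): for a NARY whose expression
   is x_1 + y_2, the statement appended to STMTS looks like

     pretmp_6 = x_1 + y_2;

   and pretmp_6 is registered with EXPR's value-id in NEW_SETS and
   AVAIL_OUT of BLOCK, so later lookups of that value find it as a
   leader instead of inserting again.  */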
3125 /* Returns true if we want to inhibit the insertions of PHI nodes
3126 for the given EXPR for basic block BB (a member of a loop).
3127 We want to do this when we fear that the induction variable we
3128 create might inhibit vectorization. */
3130 static bool
3131 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3133 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3134 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3135 vn_reference_op_t op;
3136 unsigned i;
3138 /* If we aren't going to vectorize we don't inhibit anything. */
3139 if (!flag_tree_vectorize)
3140 return false;
3142 /* Otherwise we inhibit the insertion when the address of the
3143 memory reference is a simple induction variable. In other
3144 cases the vectorizer won't do anything anyway (either it's
3145 loop invariant or a complicated expression). */
3146 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3148 switch (op->opcode)
3150 case CALL_EXPR:
3151 /* Calls are not a problem. */
3152 return false;
3154 case ARRAY_REF:
3155 case ARRAY_RANGE_REF:
3156 if (TREE_CODE (op->op0) != SSA_NAME)
3157 break;
3158 /* Fallthru. */
3159 case SSA_NAME:
3161 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3162 affine_iv iv;
3163 /* Default defs are loop invariant. */
3164 if (!defbb)
3165 break;
3166 /* Defined outside this loop, also loop invariant. */
3167 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3168 break;
3169 /* If it's a simple induction variable inhibit insertion,
3170 the vectorizer might be interested in this one. */
3171 if (simple_iv (bb->loop_father, bb->loop_father,
3172 op->op0, &iv, true))
3173 return true;
3174 /* No simple IV, vectorizer can't do anything, hence no
3175 reason to inhibit the transformation for this operand. */
3176 break;
3178 default:
3179 break;
3182 return false;
3185 /* Insert the to-be-made-available values of expression EXPRNUM for each
3186 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3187 merge the result with a phi node, given the same value number as
3188 NODE. Return true if we have inserted new stuff. */
3190 static bool
3191 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3192 VEC(pre_expr, heap) *avail)
3194 pre_expr expr = expression_for_id (exprnum);
3195 pre_expr newphi;
3196 unsigned int val = get_expr_value_id (expr);
3197 edge pred;
3198 bool insertions = false;
3199 bool nophi = false;
3200 basic_block bprime;
3201 pre_expr eprime;
3202 edge_iterator ei;
3203 tree type = get_expr_type (expr);
3204 tree temp;
3205 gimple phi;
3207 /* Make sure we aren't creating an induction variable. */
3208 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3210 bool firstinsideloop = false;
3211 bool secondinsideloop = false;
3212 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3213 EDGE_PRED (block, 0)->src);
3214 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3215 EDGE_PRED (block, 1)->src);
3216 /* Induction variables only have one edge inside the loop. */
3217 if ((firstinsideloop ^ secondinsideloop)
3218 && (expr->kind != REFERENCE
3219 || inhibit_phi_insertion (block, expr)))
3221 if (dump_file && (dump_flags & TDF_DETAILS))
3222 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3223 nophi = true;
3227 /* Make the necessary insertions. */
3228 FOR_EACH_EDGE (pred, ei, block->preds)
3230 gimple_seq stmts = NULL;
3231 tree builtexpr;
3232 bprime = pred->src;
3233 eprime = VEC_index (pre_expr, avail, pred->dest_idx);
3235 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3237 builtexpr = create_expression_by_pieces (bprime,
3238 eprime,
3239 &stmts, NULL,
3240 type);
3241 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3242 gsi_insert_seq_on_edge (pred, stmts);
3243 VEC_replace (pre_expr, avail, pred->dest_idx,
3244 get_or_alloc_expr_for_name (builtexpr));
3245 insertions = true;
3247 else if (eprime->kind == CONSTANT)
3249 /* Constants may not have the right type; fold_convert
3250 should give us back a constant with the right type. */
3251 tree constant = PRE_EXPR_CONSTANT (eprime);
3252 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3254 tree builtexpr = fold_convert (type, constant);
3255 if (!is_gimple_min_invariant (builtexpr))
3257 tree forcedexpr = force_gimple_operand (builtexpr,
3258 &stmts, true,
3259 NULL);
3260 if (!is_gimple_min_invariant (forcedexpr))
3262 if (forcedexpr != builtexpr)
3264 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3265 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3267 if (stmts)
3269 gimple_stmt_iterator gsi;
3270 gsi = gsi_start (stmts);
3271 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3273 gimple stmt = gsi_stmt (gsi);
3274 tree lhs = gimple_get_lhs (stmt);
3275 if (TREE_CODE (lhs) == SSA_NAME)
3276 bitmap_set_bit (inserted_exprs,
3277 SSA_NAME_VERSION (lhs));
3278 gimple_set_plf (stmt, NECESSARY, false);
3280 gsi_insert_seq_on_edge (pred, stmts);
3282 VEC_replace (pre_expr, avail, pred->dest_idx,
3283 get_or_alloc_expr_for_name (forcedexpr));
3286 else
3287 VEC_replace (pre_expr, avail, pred->dest_idx,
3288 get_or_alloc_expr_for_constant (builtexpr));
3291 else if (eprime->kind == NAME)
3293 /* We may have to do a conversion because our value
3294 numbering can look through types in certain cases, but
3295 our IL requires all operands of a phi node have the same
3296 type. */
3297 tree name = PRE_EXPR_NAME (eprime);
3298 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3300 tree builtexpr;
3301 tree forcedexpr;
3302 builtexpr = fold_convert (type, name);
3303 forcedexpr = force_gimple_operand (builtexpr,
3304 &stmts, true,
3305 NULL);
3307 if (forcedexpr != name)
3309 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3310 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3313 if (stmts)
3315 gimple_stmt_iterator gsi;
3316 gsi = gsi_start (stmts);
3317 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3319 gimple stmt = gsi_stmt (gsi);
3320 tree lhs = gimple_get_lhs (stmt);
3321 if (TREE_CODE (lhs) == SSA_NAME)
3322 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3323 gimple_set_plf (stmt, NECESSARY, false);
3325 gsi_insert_seq_on_edge (pred, stmts);
3327 VEC_replace (pre_expr, avail, pred->dest_idx,
3328 get_or_alloc_expr_for_name (forcedexpr));
3332 /* If we didn't want a phi node, and we made insertions, we still have
3333 inserted new stuff, and thus return true. If we didn't want a phi node,
3334 and didn't make insertions, we haven't added anything new, so return
3335 false. */
3336 if (nophi && insertions)
3337 return true;
3338 else if (nophi && !insertions)
3339 return false;
3341 /* Now build a phi for the new variable. */
3342 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3343 phi = create_phi_node (temp, block);
3345 gimple_set_plf (phi, NECESSARY, false);
3346 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3347 VN_INFO (gimple_phi_result (phi))->value_id = val;
3348 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3349 FOR_EACH_EDGE (pred, ei, block->preds)
3351 pre_expr ae = VEC_index (pre_expr, avail, pred->dest_idx);
3352 gcc_assert (get_expr_type (ae) == type
3353 || useless_type_conversion_p (type, get_expr_type (ae)));
3354 if (ae->kind == CONSTANT)
3355 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3356 else
3357 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3360 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3361 add_to_value (val, newphi);
3363 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3364 this insertion, since we test for the existence of this value in PHI_GEN
3365 before proceeding with the partial redundancy checks in insert_aux.
3367 The value may exist in AVAIL_OUT, in particular, it could be represented
3368 by the expression we are trying to eliminate, in which case we want the
3369 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3370 inserted there.
3372 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3373 this block, because if it did, it would have existed in our dominator's
3374 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3377 bitmap_insert_into_set (PHI_GEN (block), newphi);
3378 bitmap_value_replace_in_set (AVAIL_OUT (block),
3379 newphi);
3380 bitmap_insert_into_set (NEW_SETS (block),
3381 newphi);
3383 if (dump_file && (dump_flags & TDF_DETAILS))
3385 fprintf (dump_file, "Created phi ");
3386 print_gimple_stmt (dump_file, phi, 0, 0);
3387 fprintf (dump_file, " in block %d\n", block->index);
3389 pre_stats.phis++;
3390 return true;
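/* For illustration (hypothetical SSA names): with two predecessors
   where the value of x_1 + y_2 is available only in PRED1 as t_5, the
   code above first inserts

     PRED2:  pretmp_7 = x_1 + y_2;

   and then merges the now fully available value with

     BLOCK:  prephitmp_8 = PHI <t_5 (PRED1), pretmp_7 (PRED2)>

   registering prephitmp_8 in PHI_GEN, AVAIL_OUT and NEW_SETS.  */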
3395 /* Perform insertion of partially redundant values.
3396 For BLOCK, do the following:
3397 1. Propagate the NEW_SETS of the dominator into the current block.
3398 If the block has multiple predecessors,
3399 2a. Iterate over the ANTIC expressions for the block to see if
3400 any of them are partially redundant.
3401 2b. If so, insert them into the necessary predecessors to make
3402 the expression fully redundant.
3403 2c. Insert a new PHI merging the values of the predecessors.
3404 2d. Insert the new PHI, and the new expressions, into the
3405 NEW_SETS set.
3406 3. Recursively call ourselves on the dominator children of BLOCK.
3408 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3409 do_regular_insertion and do_partial_partial_insertion.
3413 static bool
3414 do_regular_insertion (basic_block block, basic_block dom)
3416 bool new_stuff = false;
3417 VEC (pre_expr, heap) *exprs;
3418 pre_expr expr;
3419 VEC (pre_expr, heap) *avail = NULL;
3420 int i;
3422 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3423 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3425 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3427 if (expr->kind != NAME)
3429 unsigned int val;
3430 bool by_some = false;
3431 bool cant_insert = false;
3432 bool all_same = true;
3433 pre_expr first_s = NULL;
3434 edge pred;
3435 basic_block bprime;
3436 pre_expr eprime = NULL;
3437 edge_iterator ei;
3438 pre_expr edoubleprime = NULL;
3439 bool do_insertion = false;
3441 val = get_expr_value_id (expr);
3442 if (bitmap_set_contains_value (PHI_GEN (block), val))
3443 continue;
3444 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3446 if (dump_file && (dump_flags & TDF_DETAILS))
3447 fprintf (dump_file, "Found fully redundant value\n");
3448 continue;
3451 FOR_EACH_EDGE (pred, ei, block->preds)
3453 unsigned int vprime;
3455 /* We should never run insertion for the exit block
3456 and so should never come across fake pred edges. */
3457 gcc_assert (!(pred->flags & EDGE_FAKE));
3458 bprime = pred->src;
3459 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3460 bprime, block);
3462 /* eprime will generally only be NULL if the
3463 value of the expression, translated
3464 through the PHI for this predecessor, is
3465 undefined. If that is the case, we can't
3466 make the expression fully redundant,
3467 because its value is undefined along a
3468 predecessor path. We can thus break out
3469 early because it doesn't matter what the
3470 rest of the results are. */
3471 if (eprime == NULL)
3473 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3474 cant_insert = true;
3475 break;
3478 eprime = fully_constant_expression (eprime);
3479 vprime = get_expr_value_id (eprime);
3480 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3481 vprime, NULL);
3482 if (edoubleprime == NULL)
3484 VEC_replace (pre_expr, avail, pred->dest_idx, eprime);
3485 all_same = false;
3487 else
3489 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3490 by_some = true;
3491 /* We want to perform insertions to remove a redundancy on
3492 a path in the CFG we want to optimize for speed. */
3493 if (optimize_edge_for_speed_p (pred))
3494 do_insertion = true;
3495 if (first_s == NULL)
3496 first_s = edoubleprime;
3497 else if (!pre_expr_d::equal (first_s, edoubleprime))
3498 all_same = false;
3501 /* If we can insert it, it's not the same value
3502 already existing along every predecessor, and
3503 it's defined by some predecessor, it is
3504 partially redundant. */
3505 if (!cant_insert && !all_same && by_some)
3507 if (!do_insertion)
3509 if (dump_file && (dump_flags & TDF_DETAILS))
3511 fprintf (dump_file, "Skipping partial redundancy for "
3512 "expression ");
3513 print_pre_expr (dump_file, expr);
3514 fprintf (dump_file, " (%04d), no redundancy on to be "
3515 "optimized for speed edge\n", val);
3518 else if (dbg_cnt (treepre_insert))
3520 if (dump_file && (dump_flags & TDF_DETAILS))
3522 fprintf (dump_file, "Found partial redundancy for "
3523 "expression ");
3524 print_pre_expr (dump_file, expr);
3525 fprintf (dump_file, " (%04d)\n",
3526 get_expr_value_id (expr));
3528 if (insert_into_preds_of_block (block,
3529 get_expression_id (expr),
3530 avail))
3531 new_stuff = true;
3534 /* If all edges produce the same value and that value is
3535 an invariant, then the PHI has the same value on all
3536 edges. Note this. */
3537 else if (!cant_insert && all_same && eprime
3538 && (edoubleprime->kind == CONSTANT
3539 || edoubleprime->kind == NAME)
3540 && !value_id_constant_p (val))
3542 unsigned int j;
3543 bitmap_iterator bi;
3544 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3545 value_expressions, val);
3547 unsigned int new_val = get_expr_value_id (edoubleprime);
3548 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3550 pre_expr expr = expression_for_id (j);
3552 if (expr->kind == NAME)
3554 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3555 /* Just reset the value id and valnum so it is
3556 the same as the constant we have discovered. */
3557 if (edoubleprime->kind == CONSTANT)
3559 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3560 pre_stats.constified++;
3562 else
3563 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3564 info->value_id = new_val;
3571 VEC_free (pre_expr, heap, exprs);
3572 VEC_free (pre_expr, heap, avail);
3573 return new_stuff;
3577 /* Perform insertion for partially anticipatable expressions. There
3578 is only one case in which we perform insertion for these: when the
3579 expression is partially anticipatable and fully available.
3580 In this case, we know that putting it earlier will enable us to
3581 remove the later computation. */
3584 static bool
3585 do_partial_partial_insertion (basic_block block, basic_block dom)
3587 bool new_stuff = false;
3588 VEC (pre_expr, heap) *exprs;
3589 pre_expr expr;
3590 VEC (pre_expr, heap) *avail = NULL;
3591 int i;
3593 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3594 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3596 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3598 if (expr->kind != NAME)
3600 unsigned int val;
3601 bool by_all = true;
3602 bool cant_insert = false;
3603 edge pred;
3604 basic_block bprime;
3605 pre_expr eprime = NULL;
3606 edge_iterator ei;
3608 val = get_expr_value_id (expr);
3609 if (bitmap_set_contains_value (PHI_GEN (block), val))
3610 continue;
3611 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3612 continue;
3614 FOR_EACH_EDGE (pred, ei, block->preds)
3616 unsigned int vprime;
3617 pre_expr edoubleprime;
3619 /* We should never run insertion for the exit block
3620 and so should never come across fake pred edges. */
3621 gcc_assert (!(pred->flags & EDGE_FAKE));
3622 bprime = pred->src;
3623 eprime = phi_translate (expr, ANTIC_IN (block),
3624 PA_IN (block),
3625 bprime, block);
3627 /* eprime will generally only be NULL if the
3628 value of the expression, translated
3629 through the PHI for this predecessor, is
3630 undefined. If that is the case, we can't
3631 make the expression fully redundant,
3632 because its value is undefined along a
3633 predecessor path. We can thus break out
3634 early because it doesn't matter what the
3635 rest of the results are. */
3636 if (eprime == NULL)
3638 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3639 cant_insert = true;
3640 break;
3643 eprime = fully_constant_expression (eprime);
3644 vprime = get_expr_value_id (eprime);
3645 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3646 vprime, NULL);
3647 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3648 if (edoubleprime == NULL)
3650 by_all = false;
3651 break;
3655 /* If we can insert it, it's not the same value
3656 already existing along every predecessor, and
3657 it's defined by some predecessor, it is
3658 partially redundant. */
3659 if (!cant_insert && by_all)
3661 edge succ;
3662 bool do_insertion = false;
3664 /* Insert only if we can remove a later expression on a path
3665 that we want to optimize for speed.
3666 The phi node that we will be inserting in BLOCK is not free,
3667 and inserting it for the sake of a !optimize_for_speed successor
3668 may cause regressions on the speed path. */
3669 FOR_EACH_EDGE (succ, ei, block->succs)
3671 if (bitmap_set_contains_value (PA_IN (succ->dest), val))
3673 if (optimize_edge_for_speed_p (succ))
3674 do_insertion = true;
3678 if (!do_insertion)
3680 if (dump_file && (dump_flags & TDF_DETAILS))
3682 fprintf (dump_file, "Skipping partial partial redundancy "
3683 "for expression ");
3684 print_pre_expr (dump_file, expr);
3685 fprintf (dump_file, " (%04d), not partially anticipated "
3686 "on any to be optimized for speed edges\n", val);
3689 else if (dbg_cnt (treepre_insert))
3691 pre_stats.pa_insert++;
3692 if (dump_file && (dump_flags & TDF_DETAILS))
3694 fprintf (dump_file, "Found partial partial redundancy "
3695 "for expression ");
3696 print_pre_expr (dump_file, expr);
3697 fprintf (dump_file, " (%04d)\n",
3698 get_expr_value_id (expr));
3700 if (insert_into_preds_of_block (block,
3701 get_expression_id (expr),
3702 avail))
3703 new_stuff = true;
3709 VEC_free (pre_expr, heap, exprs);
3710 VEC_free (pre_expr, heap, avail);
3711 return new_stuff;
3714 static bool
3715 insert_aux (basic_block block)
3717 basic_block son;
3718 bool new_stuff = false;
3720 if (block)
3722 basic_block dom;
3723 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3724 if (dom)
3726 unsigned i;
3727 bitmap_iterator bi;
3728 bitmap_set_t newset = NEW_SETS (dom);
3729 if (newset)
3731 /* Note that we need to value_replace both NEW_SETS and
3732 AVAIL_OUT. In both sets the value may be represented by
3733 some non-simple expression that we want to replace with
3734 the one propagated from the dominator. */
3735 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3737 pre_expr expr = expression_for_id (i);
3738 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3739 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3742 if (!single_pred_p (block))
3744 new_stuff |= do_regular_insertion (block, dom);
3745 if (do_partial_partial)
3746 new_stuff |= do_partial_partial_insertion (block, dom);
3750 for (son = first_dom_son (CDI_DOMINATORS, block);
3751 son;
3752 son = next_dom_son (CDI_DOMINATORS, son))
3754 new_stuff |= insert_aux (son);
3757 return new_stuff;
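/* A minimal sketch (hypothetical helpers, -1 terminating the son
   list) of the recursion shape of insert_aux: visit a block together
   with its immediate dominator, then recurse over the dominator tree,
   OR-ing together whether anything new was inserted.  */
static bool
walk_dom_sons (int bb, int idom, bool (*visit) (int bb, int idom),
	       int (*first_son) (int), int (*next_son) (int))
{
  bool new_stuff = visit (bb, idom);
  for (int son = first_son (bb); son != -1; son = next_son (son))
    new_stuff |= walk_dom_sons (son, bb, visit, first_son, next_son);
  return new_stuff;
}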
3760 /* Perform insertion of partially redundant values. */
3762 static void
3763 insert (void)
3765 bool new_stuff = true;
3766 basic_block bb;
3767 int num_iterations = 0;
3769 FOR_ALL_BB (bb)
3770 NEW_SETS (bb) = bitmap_set_new ();
3772 while (new_stuff)
3774 num_iterations++;
3775 if (dump_file && (dump_flags & TDF_DETAILS))
3776 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3777 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3779 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3783 /* Add OP to EXP_GEN (block). */
3785 static void
3786 add_to_exp_gen (basic_block block, tree op)
3788 if (!in_fre)
3790 pre_expr result;
3791 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3792 return;
3793 result = get_or_alloc_expr_for_name (op);
3794 bitmap_value_insert_into_set (EXP_GEN (block), result);
3798 /* Create value ids for PHI in BLOCK. */
3800 static void
3801 make_values_for_phi (gimple phi, basic_block block)
3803 tree result = gimple_phi_result (phi);
3805 /* We have no need for virtual phis, as they don't represent
3806 actual computations. */
3807 if (!virtual_operand_p (result))
3809 pre_expr e = get_or_alloc_expr_for_name (result);
3810 add_to_value (get_expr_value_id (e), e);
3811 bitmap_insert_into_set (PHI_GEN (block), e);
3812 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3813 if (!in_fre)
3815 unsigned i;
3816 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3818 tree arg = gimple_phi_arg_def (phi, i);
3819 if (TREE_CODE (arg) == SSA_NAME)
3821 e = get_or_alloc_expr_for_name (arg);
3822 add_to_value (get_expr_value_id (e), e);
3829 /* Compute the AVAIL set for all basic blocks.
3831 This function performs value numbering of the statements in each basic
3832 block. The AVAIL sets are built from information we glean while doing
3833 this value numbering, since the AVAIL sets contain only one entry per
3834 value.
3836 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3837 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3839 static void
3840 compute_avail (void)
3843 basic_block block, son;
3844 basic_block *worklist;
3845 size_t sp = 0;
3846 unsigned i;
3848 /* We pretend that default definitions are defined in the entry block.
3849 This includes function arguments and the static chain decl. */
3850 for (i = 1; i < num_ssa_names; ++i)
3852 tree name = ssa_name (i);
3853 pre_expr e;
3854 if (!name
3855 || !SSA_NAME_IS_DEFAULT_DEF (name)
3856 || has_zero_uses (name)
3857 || virtual_operand_p (name))
3858 continue;
3860 e = get_or_alloc_expr_for_name (name);
3861 add_to_value (get_expr_value_id (e), e);
3862 if (!in_fre)
3863 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3864 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3867 /* Allocate the worklist. */
3868 worklist = XNEWVEC (basic_block, n_basic_blocks);
3870 /* Seed the algorithm by putting the dominator children of the entry
3871 block on the worklist. */
3872 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3873 son;
3874 son = next_dom_son (CDI_DOMINATORS, son))
3875 worklist[sp++] = son;
3877 /* Loop until the worklist is empty. */
3878 while (sp)
3880 gimple_stmt_iterator gsi;
3881 gimple stmt;
3882 basic_block dom;
3883 unsigned int stmt_uid = 1;
3885 /* Pick a block from the worklist. */
3886 block = worklist[--sp];
3888 /* Initially, the set of available values in BLOCK is that of
3889 its immediate dominator. */
3890 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3891 if (dom)
3892 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3894 /* Generate values for PHI nodes. */
3895 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3896 make_values_for_phi (gsi_stmt (gsi), block);
3898 BB_MAY_NOTRETURN (block) = 0;
3900 /* Now compute value numbers and populate value sets with all
3901 the expressions computed in BLOCK. */
3902 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3904 ssa_op_iter iter;
3905 tree op;
3907 stmt = gsi_stmt (gsi);
3908 gimple_set_uid (stmt, stmt_uid++);
3910 /* Cache whether the basic-block has any non-visible side-effect
3911 or control flow.
3912 If this isn't a call or it is the last stmt in the
3913 basic-block then the CFG represents things correctly. */
3914 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3916 /* Non-looping const functions always return normally.
3917 Otherwise the call might not return or have side-effects
3918 that forbid hoisting possibly trapping expressions
3919 before it. */
3920 int flags = gimple_call_flags (stmt);
3921 if (!(flags & ECF_CONST)
3922 || (flags & ECF_LOOPING_CONST_OR_PURE))
3923 BB_MAY_NOTRETURN (block) = 1;
3926 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3928 pre_expr e = get_or_alloc_expr_for_name (op);
3930 add_to_value (get_expr_value_id (e), e);
3931 if (!in_fre)
3932 bitmap_insert_into_set (TMP_GEN (block), e);
3933 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3936 if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
3937 continue;
3939 switch (gimple_code (stmt))
3941 case GIMPLE_RETURN:
3942 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3943 add_to_exp_gen (block, op);
3944 continue;
3946 case GIMPLE_CALL:
3948 vn_reference_t ref;
3949 unsigned int i;
3950 vn_reference_op_t vro;
3951 pre_expr result = NULL;
3952 VEC(vn_reference_op_s, heap) *ops = NULL;
3954 /* We can value number only calls to real functions. */
3955 if (gimple_call_internal_p (stmt))
3956 continue;
3958 copy_reference_ops_from_call (stmt, &ops);
3959 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3960 gimple_expr_type (stmt),
3961 ops, &ref, VN_NOWALK);
3962 VEC_free (vn_reference_op_s, heap, ops);
3963 if (!ref)
3964 continue;
3966 for (i = 0; VEC_iterate (vn_reference_op_s,
3967 ref->operands, i,
3968 vro); i++)
3970 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3971 add_to_exp_gen (block, vro->op0);
3972 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3973 add_to_exp_gen (block, vro->op1);
3974 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3975 add_to_exp_gen (block, vro->op2);
3978 /* If the value of the call is not invalidated in
3979 this block until it is computed, add the expression
3980 to EXP_GEN. */
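/* Illustration with hypothetical names:

     t_1 = pure_f (a_2);   <-- VUSE .MEM_5 defined in another block
     *p_3 = 0;             <-- defines .MEM_6 in this block
     t_4 = pure_f (a_2);   <-- VUSE .MEM_6 defined locally

   Only the first call passes the test below and enters EXP_GEN; the
   second one might see different memory than at block entry.  */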
3981 if (!gimple_vuse (stmt)
3982 || gimple_code
3983 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3984 || gimple_bb (SSA_NAME_DEF_STMT
3985 (gimple_vuse (stmt))) != block)
3987 result = (pre_expr) pool_alloc (pre_expr_pool);
3988 result->kind = REFERENCE;
3989 result->id = 0;
3990 PRE_EXPR_REFERENCE (result) = ref;
3992 get_or_alloc_expression_id (result);
3993 add_to_value (get_expr_value_id (result), result);
3994 if (!in_fre)
3995 bitmap_value_insert_into_set (EXP_GEN (block), result);
3997 continue;
4000 case GIMPLE_ASSIGN:
4002 pre_expr result = NULL;
4003 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4005 case tcc_unary:
4006 case tcc_binary:
4007 case tcc_comparison:
4009 vn_nary_op_t nary;
4010 unsigned int i;
4012 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4013 gimple_assign_rhs_code (stmt),
4014 gimple_expr_type (stmt),
4015 gimple_assign_rhs1_ptr (stmt),
4016 &nary);
4018 if (!nary)
4019 continue;
4021 for (i = 0; i < nary->length; i++)
4022 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4023 add_to_exp_gen (block, nary->op[i]);
4025 /* If the NARY traps and there was a preceding
4026 point in the block that might not return, avoid
4027 adding the NARY to EXP_GEN. */
4028 if (BB_MAY_NOTRETURN (block)
4029 && vn_nary_may_trap (nary))
4030 continue;
4032 result = (pre_expr) pool_alloc (pre_expr_pool);
4033 result->kind = NARY;
4034 result->id = 0;
4035 PRE_EXPR_NARY (result) = nary;
4036 break;
4039 case tcc_declaration:
4040 case tcc_reference:
4042 vn_reference_t ref;
4043 unsigned int i;
4044 vn_reference_op_t vro;
4046 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4047 gimple_vuse (stmt),
4048 VN_WALK, &ref);
4049 if (!ref)
4050 continue;
4052 for (i = 0; VEC_iterate (vn_reference_op_s,
4053 ref->operands, i,
4054 vro); i++)
4056 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4057 add_to_exp_gen (block, vro->op0);
4058 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4059 add_to_exp_gen (block, vro->op1);
4060 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4061 add_to_exp_gen (block, vro->op2);
4064 /* If the value of the reference is not invalidated in
4065 this block until it is computed, add the expression
4066 to EXP_GEN. */
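/* A minimal example of the walk below (hypothetical names):

     *p_1 = 5;     <-- may clobber *q_2 if p_1 and q_2 can alias
     t_3 = *q_2;

   Following the virtual def chain from the load's VUSE reaches the
   store within the same block; stmt_may_clobber_ref_p flags it, so
   the load is not added to EXP_GEN.  */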
4067 if (gimple_vuse (stmt))
4069 gimple def_stmt;
4070 bool ok = true;
4071 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
4072 while (!gimple_nop_p (def_stmt)
4073 && gimple_code (def_stmt) != GIMPLE_PHI
4074 && gimple_bb (def_stmt) == block)
4076 if (stmt_may_clobber_ref_p
4077 (def_stmt, gimple_assign_rhs1 (stmt)))
4079 ok = false;
4080 break;
4082 def_stmt
4083 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
4085 if (!ok)
4086 continue;
4089 result = (pre_expr) pool_alloc (pre_expr_pool);
4090 result->kind = REFERENCE;
4091 result->id = 0;
4092 PRE_EXPR_REFERENCE (result) = ref;
4093 break;
4096 default:
4097 /* For any other statement that we don't
4098 recognize, simply add all referenced
4099 SSA_NAMEs to EXP_GEN. */
4100 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4101 add_to_exp_gen (block, op);
4102 continue;
4105 get_or_alloc_expression_id (result);
4106 add_to_value (get_expr_value_id (result), result);
4107 if (!in_fre)
4108 bitmap_value_insert_into_set (EXP_GEN (block), result);
4110 continue;
4112 default:
4113 break;
4117 /* Put the dominator children of BLOCK on the worklist of blocks
4118 to compute available sets for. */
4119 for (son = first_dom_son (CDI_DOMINATORS, block);
4120 son;
4121 son = next_dom_son (CDI_DOMINATORS, son))
4122 worklist[sp++] = son;
4125 free (worklist);
4128 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4129 than the available expressions for it. The insertion point is
4130 right before the first use in STMT. Returns the SSA_NAME that should
4131 be used for replacement. */
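/* A hedged sketch of its use (hypothetical names): SCCVN may record
   that x_5 has value a_1 + 1 in simplified form although no statement
   computes a_1 + 1 yet; this routine would then materialize

     pretmp_9 = a_1 + 1;

   right before STMT and return pretmp_9.  */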
4133 static tree
4134 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4136 basic_block bb = gimple_bb (stmt);
4137 gimple_stmt_iterator gsi;
4138 gimple_seq stmts = NULL;
4139 tree expr;
4140 pre_expr e;
4142 /* First create a value expression from the expression we want
4143 to insert and associate it with the value handle for SSA_VN. */
4144 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4145 if (e == NULL)
4146 return NULL_TREE;
4148 /* Then use create_expression_by_pieces to generate a valid
4149 expression to insert at this point of the IL stream. */
4150 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4151 if (expr == NULL_TREE)
4152 return NULL_TREE;
4153 gsi = gsi_for_stmt (stmt);
4154 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4156 return expr;
4159 /* Eliminate fully redundant computations. */
4161 static unsigned int
4162 eliminate (void)
4164 VEC (gimple, heap) *to_remove = NULL;
4165 VEC (gimple, heap) *to_update = NULL;
4166 basic_block b;
4167 unsigned int todo = 0;
4168 gimple_stmt_iterator gsi;
4169 gimple stmt;
4170 unsigned i;
4172 FOR_EACH_BB (b)
4174 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4176 tree lhs = NULL_TREE;
4177 tree rhs = NULL_TREE;
4179 stmt = gsi_stmt (gsi);
4181 if (gimple_has_lhs (stmt))
4182 lhs = gimple_get_lhs (stmt);
4184 if (gimple_assign_single_p (stmt))
4185 rhs = gimple_assign_rhs1 (stmt);
4187 /* Look up the RHS of the expression and see if we have an
4188 available computation for it. If so, replace the RHS with
4189 the available computation.
4191 See PR43491.
4192 We don't replace a global register variable when it is the RHS of
4193 a single assign. We do replace a local register variable since gcc
4194 does not guarantee that a local variable will be allocated in a register. */
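/* A minimal example (hypothetical SSA names):

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;   <-- leader in AVAIL_OUT for this value is a_1

   The second statement is rewritten to d_4 = a_1 by the propagation
   below.  */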
4195 if (gimple_has_lhs (stmt)
4196 && TREE_CODE (lhs) == SSA_NAME
4197 && !gimple_assign_ssa_name_copy_p (stmt)
4198 && (!gimple_assign_single_p (stmt)
4199 || (!is_gimple_min_invariant (rhs)
4200 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4201 || !is_global_var (rhs)
4202 || !DECL_HARD_REGISTER (rhs))))
4203 && !gimple_has_volatile_ops (stmt)
4204 && !has_zero_uses (lhs))
4206 tree sprime = NULL;
4207 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4208 pre_expr sprimeexpr;
4209 gimple orig_stmt = stmt;
4211 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4212 get_expr_value_id (lhsexpr),
4213 NULL);
4215 if (sprimeexpr)
4217 if (sprimeexpr->kind == CONSTANT)
4218 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4219 else if (sprimeexpr->kind == NAME)
4220 sprime = PRE_EXPR_NAME (sprimeexpr);
4221 else
4222 gcc_unreachable ();
4225 /* If there is no existing leader but SCCVN knows this
4226 value is constant, use that constant. */
4227 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4229 sprime = VN_INFO (lhs)->valnum;
4230 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4231 TREE_TYPE (sprime)))
4232 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4234 if (dump_file && (dump_flags & TDF_DETAILS))
4236 fprintf (dump_file, "Replaced ");
4237 print_gimple_expr (dump_file, stmt, 0, 0);
4238 fprintf (dump_file, " with ");
4239 print_generic_expr (dump_file, sprime, 0);
4240 fprintf (dump_file, " in ");
4241 print_gimple_stmt (dump_file, stmt, 0, 0);
4243 pre_stats.eliminations++;
4244 propagate_tree_value_into_stmt (&gsi, sprime);
4245 stmt = gsi_stmt (gsi);
4246 update_stmt (stmt);
4248 /* If we removed EH side-effects from the statement, clean
4249 its EH information. */
4250 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4252 bitmap_set_bit (need_eh_cleanup,
4253 gimple_bb (stmt)->index);
4254 if (dump_file && (dump_flags & TDF_DETAILS))
4255 fprintf (dump_file, " Removed EH side-effects.\n");
4257 continue;
4260 /* If there is no existing usable leader but SCCVN thinks
4261 it has an expression it wants to use as replacement,
4262 insert that. */
4263 if (!sprime || sprime == lhs)
4265 tree val = VN_INFO (lhs)->valnum;
4266 if (val != VN_TOP
4267 && TREE_CODE (val) == SSA_NAME
4268 && VN_INFO (val)->needs_insertion
4269 && can_PRE_operation (vn_get_expr_for (val)))
4270 sprime = do_SCCVN_insertion (stmt, val);
4272 if (sprime
4273 && sprime != lhs
4274 && (rhs == NULL_TREE
4275 || TREE_CODE (rhs) != SSA_NAME
4276 || may_propagate_copy (rhs, sprime)))
4278 bool can_make_abnormal_goto
4279 = is_gimple_call (stmt)
4280 && stmt_can_make_abnormal_goto (stmt);
4282 gcc_assert (sprime != rhs);
4284 if (dump_file && (dump_flags & TDF_DETAILS))
4286 fprintf (dump_file, "Replaced ");
4287 print_gimple_expr (dump_file, stmt, 0, 0);
4288 fprintf (dump_file, " with ");
4289 print_generic_expr (dump_file, sprime, 0);
4290 fprintf (dump_file, " in ");
4291 print_gimple_stmt (dump_file, stmt, 0, 0);
4294 if (TREE_CODE (sprime) == SSA_NAME)
4295 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4296 NECESSARY, true);
4297 /* We need to make sure the new and old types actually match,
4298 which may require adding a simple cast, which fold_convert
4299 will do for us. */
4300 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4301 && !useless_type_conversion_p (gimple_expr_type (stmt),
4302 TREE_TYPE (sprime)))
4303 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4305 pre_stats.eliminations++;
4306 propagate_tree_value_into_stmt (&gsi, sprime);
4307 stmt = gsi_stmt (gsi);
4308 update_stmt (stmt);
4310 /* If we removed EH side-effects from the statement, clean
4311 its EH information. */
4312 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4314 bitmap_set_bit (need_eh_cleanup,
4315 gimple_bb (stmt)->index);
4316 if (dump_file && (dump_flags & TDF_DETAILS))
4317 fprintf (dump_file, " Removed EH side-effects.\n");
4320 /* Likewise for AB side-effects. */
4321 if (can_make_abnormal_goto
4322 && !stmt_can_make_abnormal_goto (stmt))
4324 bitmap_set_bit (need_ab_cleanup,
4325 gimple_bb (stmt)->index);
4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, " Removed AB side-effects.\n");
4331 /* If the statement is a scalar store, see if the expression
4332 has the same value number as its rhs. If so, the store is
4333 dead. */
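/* For example (hypothetical):

     x.f = 1;
     ...            <-- nothing clobbers x.f in between
     x.f = 1;       <-- lookup of x.f yields 1, equal to the RHS

   so the second store changes nothing and is queued for removal.  */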
4334 else if (gimple_assign_single_p (stmt)
4335 && !gimple_has_volatile_ops (stmt)
4336 && !is_gimple_reg (gimple_assign_lhs (stmt))
4337 && (TREE_CODE (rhs) == SSA_NAME
4338 || is_gimple_min_invariant (rhs)))
4340 tree val;
4341 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4342 gimple_vuse (stmt), VN_WALK, NULL);
4343 if (TREE_CODE (rhs) == SSA_NAME)
4344 rhs = VN_INFO (rhs)->valnum;
4345 if (val
4346 && operand_equal_p (val, rhs, 0))
4348 if (dump_file && (dump_flags & TDF_DETAILS))
4350 fprintf (dump_file, "Deleted redundant store ");
4351 print_gimple_stmt (dump_file, stmt, 0, 0);
4354 /* Queue stmt for removal. */
4355 VEC_safe_push (gimple, heap, to_remove, stmt);
4358 /* Visit COND_EXPRs and fold the comparison with the
4359 available value-numbers. */
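/* E.g. if VN has proven i_1 to have the constant value 3
   (hypothetical name), the comparison in "if (i_1 > 2)" folds to
   true, the branch becomes unconditional, and CFG cleanup is queued
   to remove the dead arm.  */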
4360 else if (gimple_code (stmt) == GIMPLE_COND)
4362 tree op0 = gimple_cond_lhs (stmt);
4363 tree op1 = gimple_cond_rhs (stmt);
4364 tree result;
4366 if (TREE_CODE (op0) == SSA_NAME)
4367 op0 = VN_INFO (op0)->valnum;
4368 if (TREE_CODE (op1) == SSA_NAME)
4369 op1 = VN_INFO (op1)->valnum;
4370 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4371 op0, op1);
4372 if (result && TREE_CODE (result) == INTEGER_CST)
4374 if (integer_zerop (result))
4375 gimple_cond_make_false (stmt);
4376 else
4377 gimple_cond_make_true (stmt);
4378 update_stmt (stmt);
4379 todo = TODO_cleanup_cfg;
4382 /* Visit indirect calls and turn them into direct calls if
4383 possible. */
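/* E.g. (hypothetical names) when VN knows that fn_1's value is &foo,
   the indirect call

     fn_1 ();

   becomes the direct call foo (), provided the types agree.  */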
4384 if (is_gimple_call (stmt))
4386 tree orig_fn = gimple_call_fn (stmt);
4387 tree fn;
4388 if (!orig_fn)
4389 continue;
4390 if (TREE_CODE (orig_fn) == SSA_NAME)
4391 fn = VN_INFO (orig_fn)->valnum;
4392 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4393 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4394 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4395 else
4396 continue;
4397 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4398 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4399 TREE_TYPE (fn)))
4401 bool can_make_abnormal_goto
4402 = stmt_can_make_abnormal_goto (stmt);
4403 bool was_noreturn = gimple_call_noreturn_p (stmt);
4405 if (dump_file && (dump_flags & TDF_DETAILS))
4407 fprintf (dump_file, "Replacing call target with ");
4408 print_generic_expr (dump_file, fn, 0);
4409 fprintf (dump_file, " in ");
4410 print_gimple_stmt (dump_file, stmt, 0, 0);
4413 gimple_call_set_fn (stmt, fn);
4414 VEC_safe_push (gimple, heap, to_update, stmt);
4416 /* When changing a call into a noreturn call, cfg cleanup
4417 is needed to fix up the noreturn call. */
4418 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4419 todo |= TODO_cleanup_cfg;
4421 /* If we removed EH side-effects from the statement, clean
4422 its EH information. */
4423 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4425 bitmap_set_bit (need_eh_cleanup,
4426 gimple_bb (stmt)->index);
4427 if (dump_file && (dump_flags & TDF_DETAILS))
4428 fprintf (dump_file, " Removed EH side-effects.\n");
4431 /* Likewise for AB side-effects. */
4432 if (can_make_abnormal_goto
4433 && !stmt_can_make_abnormal_goto (stmt))
4435 bitmap_set_bit (need_ab_cleanup,
4436 gimple_bb (stmt)->index);
4437 if (dump_file && (dump_flags & TDF_DETAILS))
4438 fprintf (dump_file, " Removed AB side-effects.\n");
4441 /* Changing an indirect call to a direct call may
4442 have exposed different semantics. This may
4443 require an SSA update. */
4444 todo |= TODO_update_ssa_only_virtuals;
4449 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4451 gimple stmt, phi = gsi_stmt (gsi);
4452 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4453 pre_expr sprimeexpr, resexpr;
4454 gimple_stmt_iterator gsi2;
4456 /* We want to perform redundant PHI elimination. Do so by
4457 replacing the PHI with a single copy if possible.
4458 Do not touch inserted, single-argument or virtual PHIs. */
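/* A minimal illustration (hypothetical names):

     x_3 = PHI <y_1(2), y_1(3)>

   All arguments share y_1's value number, so AVAIL_OUT already has a
   leader for x_3's value and the PHI can be replaced by the copy
   x_3 = y_1, which is queued for propagation or removal below.  */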
4459 if (gimple_phi_num_args (phi) == 1
4460 || virtual_operand_p (res))
4462 gsi_next (&gsi);
4463 continue;
4466 resexpr = get_or_alloc_expr_for_name (res);
4467 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4468 get_expr_value_id (resexpr), NULL);
4469 if (sprimeexpr)
4471 if (sprimeexpr->kind == CONSTANT)
4472 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4473 else if (sprimeexpr->kind == NAME)
4474 sprime = PRE_EXPR_NAME (sprimeexpr);
4475 else
4476 gcc_unreachable ();
4478 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4480 sprime = VN_INFO (res)->valnum;
4481 if (!useless_type_conversion_p (TREE_TYPE (res),
4482 TREE_TYPE (sprime)))
4483 sprime = fold_convert (TREE_TYPE (res), sprime);
4485 if (!sprime
4486 || sprime == res)
4488 gsi_next (&gsi);
4489 continue;
4492 if (dump_file && (dump_flags & TDF_DETAILS))
4494 fprintf (dump_file, "Replaced redundant PHI node defining ");
4495 print_generic_expr (dump_file, res, 0);
4496 fprintf (dump_file, " with ");
4497 print_generic_expr (dump_file, sprime, 0);
4498 fprintf (dump_file, "\n");
4501 remove_phi_node (&gsi, false);
4503 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4504 && TREE_CODE (sprime) == SSA_NAME)
4505 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4507 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4508 sprime = fold_convert (TREE_TYPE (res), sprime);
4509 stmt = gimple_build_assign (res, sprime);
4510 SSA_NAME_DEF_STMT (res) = stmt;
4511 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4513 gsi2 = gsi_after_labels (b);
4514 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4515 /* Queue the copy for eventual removal. */
4516 VEC_safe_push (gimple, heap, to_remove, stmt);
4517 /* If we inserted this PHI node ourselves, it's not an elimination. */
4518 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4519 pre_stats.phis--;
4520 else
4521 pre_stats.eliminations++;
4525 /* We cannot remove stmts during the BB walk, especially not release
4526 SSA names there, as this confuses the VN machinery. The stmts
4527 ending up in to_remove are either stores or simple copies. */
4528 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4530 tree lhs = gimple_assign_lhs (stmt);
4531 tree rhs = gimple_assign_rhs1 (stmt);
4532 use_operand_p use_p;
4533 gimple use_stmt;
4535 /* If there is a single use only, propagate the equivalency
4536 instead of keeping the copy. */
4537 if (TREE_CODE (lhs) == SSA_NAME
4538 && TREE_CODE (rhs) == SSA_NAME
4539 && single_imm_use (lhs, &use_p, &use_stmt)
4540 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4542 SET_USE (use_p, rhs);
4543 update_stmt (use_stmt);
4544 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4545 && TREE_CODE (rhs) == SSA_NAME)
4546 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4549 /* If this is a store or a now unused copy, remove it. */
4550 if (TREE_CODE (lhs) != SSA_NAME
4551 || has_zero_uses (lhs))
4553 basic_block bb = gimple_bb (stmt);
4554 gsi = gsi_for_stmt (stmt);
4555 unlink_stmt_vdef (stmt);
4556 if (gsi_remove (&gsi, true))
4557 bitmap_set_bit (need_eh_cleanup, bb->index);
4558 if (TREE_CODE (lhs) == SSA_NAME)
4559 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4560 release_defs (stmt);
4563 VEC_free (gimple, heap, to_remove);
4565 /* We cannot update call statements with virtual operands during
4566 the SSA walk, as this might remove them, which in turn makes
4567 our VN lattice invalid. */
4568 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4569 update_stmt (stmt);
4570 VEC_free (gimple, heap, to_update);
4572 return todo;
4575 /* Borrow a bit of tree-ssa-dce.c for the moment.
4576 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4577 doing it here may be a bit faster, and we may want critical edges kept split. */
4579 /* If OP's defining statement has not already been determined to be necessary,
4580 mark that statement necessary. Return the stmt, if it is newly
4581 necessary. */
4583 static inline gimple
4584 mark_operand_necessary (tree op)
4586 gimple stmt;
4588 gcc_assert (op);
4590 if (TREE_CODE (op) != SSA_NAME)
4591 return NULL;
4593 stmt = SSA_NAME_DEF_STMT (op);
4594 gcc_assert (stmt);
4596 if (gimple_plf (stmt, NECESSARY)
4597 || gimple_nop_p (stmt))
4598 return NULL;
4600 gimple_set_plf (stmt, NECESSARY, true);
4601 return stmt;
4604 /* Because we don't follow the standard PRE algorithm exactly, and sometimes
4605 decide not to insert PHI nodes, and because value numbering of casts isn't
4606 perfect, we sometimes end up inserting dead code. This simple DCE-like
4607 pass removes any insertions we made that weren't actually used. */
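/* For instance (hypothetical name): if insert () created

     pretmp_9 = a_1 + b_2;

   but elimination later made every use of pretmp_9 redundant, the
   defining statement never gets its NECESSARY flag set, never enters
   the worklist below, and is removed at the end.  */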
4609 static void
4610 remove_dead_inserted_code (void)
4612 bitmap worklist;
4613 unsigned i;
4614 bitmap_iterator bi;
4615 gimple t;
4617 worklist = BITMAP_ALLOC (NULL);
4618 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4620 t = SSA_NAME_DEF_STMT (ssa_name (i));
4621 if (gimple_plf (t, NECESSARY))
4622 bitmap_set_bit (worklist, i);
4624 while (!bitmap_empty_p (worklist))
4626 i = bitmap_first_set_bit (worklist);
4627 bitmap_clear_bit (worklist, i);
4628 t = SSA_NAME_DEF_STMT (ssa_name (i));
4630 /* PHI nodes are somewhat special in that each PHI alternative has
4631 data and control dependencies. All the statements feeding the
4632 PHI node's arguments are always necessary. */
4633 if (gimple_code (t) == GIMPLE_PHI)
4635 unsigned k;
4637 for (k = 0; k < gimple_phi_num_args (t); k++)
4639 tree arg = PHI_ARG_DEF (t, k);
4640 if (TREE_CODE (arg) == SSA_NAME)
4642 gimple n = mark_operand_necessary (arg);
4643 if (n)
4644 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4648 else
4650 /* Propagate through the operands. Examine all the USE, VUSE and
4651 VDEF operands in this statement. Mark all the statements
4652 which feed this statement's uses as necessary. */
4653 ssa_op_iter iter;
4654 tree use;
4656 /* The operands of VDEF expressions are also needed as they
4657 represent potential definitions that may reach this
4658 statement (VDEF operands allow us to follow def-def
4659 links). */
4661 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4663 gimple n = mark_operand_necessary (use);
4664 if (n)
4665 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4670 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4672 t = SSA_NAME_DEF_STMT (ssa_name (i));
4673 if (!gimple_plf (t, NECESSARY))
4675 gimple_stmt_iterator gsi;
4677 if (dump_file && (dump_flags & TDF_DETAILS))
4679 fprintf (dump_file, "Removing unnecessary insertion:");
4680 print_gimple_stmt (dump_file, t, 0, 0);
4683 gsi = gsi_for_stmt (t);
4684 if (gimple_code (t) == GIMPLE_PHI)
4685 remove_phi_node (&gsi, true);
4686 else
4688 gsi_remove (&gsi, true);
4689 release_defs (t);
4693 BITMAP_FREE (worklist);
4696 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4697 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4698 the number of visited blocks. */
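/* Worked example (hypothetical block numbers): for the diamond CFG
   2 -> {3, 4} -> 5 with fake entry and exit edges connected, the DFS
   below walks predecessor edges starting from EXIT and emits
   2, 3, 4, 5 (up to sibling order), i.e. a reverse post-order of the
   forward CFG, suitable for the backwards ANTIC iteration.  */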
4700 static int
4701 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4703 edge_iterator *stack;
4704 int sp;
4705 int post_order_num = 0;
4706 sbitmap visited;
4708 if (include_entry_exit)
4709 post_order[post_order_num++] = EXIT_BLOCK;
4711 /* Allocate stack for back-tracking up CFG. */
4712 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4713 sp = 0;
4715 /* Allocate bitmap to track nodes that have been visited. */
4716 visited = sbitmap_alloc (last_basic_block);
4718 /* None of the nodes in the CFG have been visited yet. */
4719 sbitmap_zero (visited);
4721 /* Push the last edge on to the stack. */
4722 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4724 while (sp)
4726 edge_iterator ei;
4727 basic_block src;
4728 basic_block dest;
4730 /* Look at the edge on the top of the stack. */
4731 ei = stack[sp - 1];
4732 src = ei_edge (ei)->src;
4733 dest = ei_edge (ei)->dest;
4735 /* Check if the edge source has been visited yet. */
4736 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4738 /* Mark that we have visited the source. */
4739 SET_BIT (visited, src->index);
4741 if (EDGE_COUNT (src->preds) > 0)
4742 /* Since the SRC node has been visited for the first
4743 time, check its predecessors. */
4744 stack[sp++] = ei_start (src->preds);
4745 else
4746 post_order[post_order_num++] = src->index;
4748 else
4750 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4751 post_order[post_order_num++] = dest->index;
4753 if (!ei_one_before_end_p (ei))
4754 ei_next (&stack[sp - 1]);
4755 else
4756 sp--;
4760 if (include_entry_exit)
4761 post_order[post_order_num++] = ENTRY_BLOCK;
4763 free (stack);
4764 sbitmap_free (visited);
4765 return post_order_num;
4769 /* Initialize data structures used by PRE. */
4771 static void
4772 init_pre (bool do_fre)
4774 basic_block bb;
4776 next_expression_id = 1;
4777 expressions = NULL;
4778 VEC_safe_push (pre_expr, heap, expressions, (pre_expr)NULL);
4779 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4780 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4781 get_max_value_id() + 1);
4782 name_to_id = NULL;
4784 in_fre = do_fre;
4786 inserted_exprs = BITMAP_ALLOC (NULL);
4788 connect_infinite_loops_to_exit ();
4789 memset (&pre_stats, 0, sizeof (pre_stats));
4792 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4793 my_rev_post_order_compute (postorder, false);
4795 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4797 calculate_dominance_info (CDI_POST_DOMINATORS);
4798 calculate_dominance_info (CDI_DOMINATORS);
4800 bitmap_obstack_initialize (&grand_bitmap_obstack);
4801 phi_translate_table.create (5110);
4802 expression_to_id.create (num_ssa_names * 3);
4803 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4804 sizeof (struct bitmap_set), 30);
4805 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4806 sizeof (struct pre_expr_d), 30);
4807 FOR_ALL_BB (bb)
4809 EXP_GEN (bb) = bitmap_set_new ();
4810 PHI_GEN (bb) = bitmap_set_new ();
4811 TMP_GEN (bb) = bitmap_set_new ();
4812 AVAIL_OUT (bb) = bitmap_set_new ();
4815 need_eh_cleanup = BITMAP_ALLOC (NULL);
4816 need_ab_cleanup = BITMAP_ALLOC (NULL);
4820 /* Deallocate data structures used by PRE. */
4822 static void
4823 fini_pre (bool do_fre)
4825 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4826 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4828 free (postorder);
4829 VEC_free (bitmap_set_t, heap, value_expressions);
4830 BITMAP_FREE (inserted_exprs);
4831 bitmap_obstack_release (&grand_bitmap_obstack);
4832 free_alloc_pool (bitmap_set_pool);
4833 free_alloc_pool (pre_expr_pool);
4834 phi_translate_table.dispose ();
4835 expression_to_id.dispose ();
4836 VEC_free (unsigned, heap, name_to_id);
4838 free_aux_for_blocks ();
4840 free_dominance_info (CDI_POST_DOMINATORS);
4842 if (do_eh_cleanup)
4843 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4845 if (do_ab_cleanup)
4846 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4848 BITMAP_FREE (need_eh_cleanup);
4849 BITMAP_FREE (need_ab_cleanup);
4851 if (do_eh_cleanup || do_ab_cleanup)
4852 cleanup_tree_cfg ();
4854 if (!do_fre)
4855 loop_optimizer_finalize ();
4858 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4859 only wants to do full redundancy elimination. */
4861 static unsigned int
4862 execute_pre (bool do_fre)
4864 unsigned int todo = 0;
4866 do_partial_partial =
4867 flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
4869 /* This has to happen before SCCVN runs because
4870 loop_optimizer_init may create new phis, etc. */
4871 if (!do_fre)
4872 loop_optimizer_init (LOOPS_NORMAL);
4874 if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
4876 if (!do_fre)
4877 loop_optimizer_finalize ();
4879 return 0;
4882 init_pre (do_fre);
4883 scev_initialize ();
4885 /* Collect and value number expressions computed in each basic block. */
4886 compute_avail ();
4888 if (dump_file && (dump_flags & TDF_DETAILS))
4890 basic_block bb;
4892 FOR_ALL_BB (bb)
4894 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4895 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4896 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4897 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4901 /* Insert can get quite slow on an incredibly large number of basic
4902 blocks due to some quadratic behavior. Until this behavior is
4903 fixed, don't run it when we have an incredibly large number of
4904 bb's. If we aren't going to run insert, there is no point in
4905 computing ANTIC, either, even though it's plenty fast. */
4906 if (!do_fre && n_basic_blocks < 4000)
4908 compute_antic ();
4909 insert ();
4912 /* Make sure to remove fake edges before committing our inserts.
4913 This makes sure we don't end up with extra critical edges that
4914 we would need to split. */
4915 remove_fake_exit_edges ();
4916 gsi_commit_edge_inserts ();
4918 /* Remove all the redundant expressions. */
4919 todo |= eliminate ();
4921 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4922 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4923 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4924 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4925 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4927 clear_expression_ids ();
4928 if (!do_fre)
4930 remove_dead_inserted_code ();
4931 todo |= TODO_verify_flow;
4934 scev_finalize ();
4935 fini_pre (do_fre);
4937 if (!do_fre)
4938 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4939 case we can merge the block with the remaining predecessor of the block.
4940 It should either:
4941 - call merge_blocks after each tail merge iteration
4942 - call merge_blocks after all tail merge iterations
4943 - mark TODO_cleanup_cfg when necessary
4944 - share the cfg cleanup with fini_pre. */
4945 todo |= tail_merge_optimize (todo);
4946 free_scc_vn ();
4948 return todo;
4951 /* Gate and execute functions for PRE. */
4953 static unsigned int
4954 do_pre (void)
4956 return execute_pre (false);
4959 static bool
4960 gate_pre (void)
4962 return flag_tree_pre != 0;
4965 struct gimple_opt_pass pass_pre =
4968 GIMPLE_PASS,
4969 "pre", /* name */
4970 gate_pre, /* gate */
4971 do_pre, /* execute */
4972 NULL, /* sub */
4973 NULL, /* next */
4974 0, /* static_pass_number */
4975 TV_TREE_PRE, /* tv_id */
4976 PROP_no_crit_edges | PROP_cfg
4977 | PROP_ssa, /* properties_required */
4978 0, /* properties_provided */
4979 0, /* properties_destroyed */
4980 TODO_rebuild_alias, /* todo_flags_start */
4981 TODO_update_ssa_only_virtuals | TODO_ggc_collect
4982 | TODO_verify_ssa /* todo_flags_finish */
4987 /* Gate and execute functions for FRE. */
4989 static unsigned int
4990 execute_fre (void)
4992 return execute_pre (true);
4995 static bool
4996 gate_fre (void)
4998 return flag_tree_fre != 0;
5001 struct gimple_opt_pass pass_fre =
5004 GIMPLE_PASS,
5005 "fre", /* name */
5006 gate_fre, /* gate */
5007 execute_fre, /* execute */
5008 NULL, /* sub */
5009 NULL, /* next */
5010 0, /* static_pass_number */
5011 TV_TREE_FRE, /* tv_id */
5012 PROP_cfg | PROP_ssa, /* properties_required */
5013 0, /* properties_provided */
5014 0, /* properties_destroyed */
5015 0, /* todo_flags_start */
5016 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */