/* SSA-PRE for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "plugin-api.h"
#include "symbol-summary.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
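/* Illustrative example (not part of the implementation; the variable
   names are made up).  For the source-level situation

     if (cond)
       x = a + b;	<- 'a + b' is AVAIL out of this predecessor
     else
       tmp = 0;		<- 'a + b' is not AVAIL here, but it is ANTIC
     y = a + b;		<- partially redundant

   insertion adds a computation of 'a + b' to the 'else' predecessor and
   a PHI merging the two values, after which elimination can rewrite the
   now fully redundant computation of 'y':

     if (cond)
       x = a + b;
     else
       pretmp = a + b;	<- inserted
     t = PHI <x, pretmp>
     y = t;  */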
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
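/* Illustrative sketch (not from the implementation; the names and
   numbers are made up).  After value numbering,

     x_1 = a_2 + b_3;	value-id 7, representative x_1
     y_4 = a_2 + b_3;	value-id 7 as well
     z_5 = 4;		constants get their own constant value-ids

   so the dataflow sets below can track small integer value-ids in
   bitmaps, and a (possibly fake) representative SSA_NAME is only
   materialized when an inserted expression actually needs one as an
   operand.  */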
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
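/* Illustrative sketch of the representation described above (the field
   names here are hypothetical; the real type is bitmap_set below):

     struct example_set
     {
       bitmap values;		/- one bit per value-id in the set -/
       bitmap expressions;	/- one bit per expression-id in the set -/
     };

   Membership and set algebra are done value-wise on the first bitmap,
   while the second one remembers which expressions stand for those
   values; a topologically sorted array is only built on demand.  */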
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d *value_type;
  typedef pre_expr_d *compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow. */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static pool_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes", 30);
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
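/* Usage sketch (illustrative only): iterating one of these sets
   follows the usual bitmap iteration pattern, e.g.

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
	 pre_expr expr = expression_for_id (i);
	 ...
       }

   which is the shape most of the set operations below use.  */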
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static pool_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets", 30);
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
531 /* A three tuple {e, pred, v} used to cache phi translations in the
532 phi_translate_table. */
534 typedef struct expr_pred_trans_d
: typed_free_remove
<expr_pred_trans_d
>
536 /* The expression. */
539 /* The predecessor block along which we translated the expression. */
542 /* The value that resulted from the translation. */
545 /* The hashcode for the expression, pred pair. This is cached for
549 /* hash_table support. */
550 typedef expr_pred_trans_d
*value_type
;
551 typedef expr_pred_trans_d
*compare_type
;
552 static inline hashval_t
hash (const expr_pred_trans_d
*);
553 static inline int equal (const expr_pred_trans_d
*, const expr_pred_trans_d
*);
554 } *expr_pred_trans_t
;
555 typedef const struct expr_pred_trans_d
*const_expr_pred_trans_t
;
558 expr_pred_trans_d::hash (const expr_pred_trans_d
*e
)
564 expr_pred_trans_d::equal (const expr_pred_trans_d
*ve1
,
565 const expr_pred_trans_d
*ve2
)
567 basic_block b1
= ve1
->pred
;
568 basic_block b2
= ve2
->pred
;
570 /* If they are not translations for the same basic block, they can't
574 return pre_expr_d::equal (ve1
->e
, ve2
->e
);
577 /* The phi_translate_table caches phi translations for a given
578 expression and predecessor. */
579 static hash_table
<expr_pred_trans_d
> *phi_translate_table
;
581 /* Add the tuple mapping from {expression E, basic block PRED} to
582 the phi translation table and return whether it pre-existed. */
585 phi_trans_add (expr_pred_trans_t
*entry
, pre_expr e
, basic_block pred
)
587 expr_pred_trans_t
*slot
;
588 expr_pred_trans_d tem
;
589 hashval_t hash
= iterative_hash_hashval_t (pre_expr_d::hash (e
),
594 slot
= phi_translate_table
->find_slot_with_hash (&tem
, hash
, INSERT
);
601 *entry
= *slot
= XNEW (struct expr_pred_trans_d
);
603 (*entry
)->pred
= pred
;
604 (*entry
)->hashcode
= hash
;
609 /* Add expression E to the expression set of value id V. */
612 add_to_value (unsigned int v
, pre_expr e
)
616 gcc_checking_assert (get_expr_value_id (e
) == v
);
618 if (v
>= value_expressions
.length ())
620 value_expressions
.safe_grow_cleared (v
+ 1);
623 set
= value_expressions
[v
];
626 set
= BITMAP_ALLOC (&grand_bitmap_obstack
);
627 value_expressions
[v
] = set
;
630 bitmap_set_bit (set
, get_or_alloc_expression_id (e
));
633 /* Create a new bitmap set and return it. */
636 bitmap_set_new (void)
638 bitmap_set_t ret
= bitmap_set_pool
.allocate ();
639 bitmap_initialize (&ret
->expressions
, &grand_bitmap_obstack
);
640 bitmap_initialize (&ret
->values
, &grand_bitmap_obstack
);
644 /* Return the value id for a PRE expression EXPR. */
647 get_expr_value_id (pre_expr expr
)
653 id
= get_constant_value_id (PRE_EXPR_CONSTANT (expr
));
656 id
= VN_INFO (PRE_EXPR_NAME (expr
))->value_id
;
659 id
= PRE_EXPR_NARY (expr
)->value_id
;
662 id
= PRE_EXPR_REFERENCE (expr
)->value_id
;
667 /* ??? We cannot assert that expr has a value-id (it can be 0), because
668 we assign value-ids only to expressions that have a result
669 in set_hashtable_value_ids. */
673 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
676 sccvn_valnum_from_value_id (unsigned int val
)
680 bitmap exprset
= value_expressions
[val
];
681 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
683 pre_expr vexpr
= expression_for_id (i
);
684 if (vexpr
->kind
== NAME
)
685 return VN_INFO (PRE_EXPR_NAME (vexpr
))->valnum
;
686 else if (vexpr
->kind
== CONSTANT
)
687 return PRE_EXPR_CONSTANT (vexpr
);
692 /* Remove an expression EXPR from a bitmapped set. */
695 bitmap_remove_from_set (bitmap_set_t set
, pre_expr expr
)
697 unsigned int val
= get_expr_value_id (expr
);
698 if (!value_id_constant_p (val
))
700 bitmap_clear_bit (&set
->values
, val
);
701 bitmap_clear_bit (&set
->expressions
, get_expression_id (expr
));
706 bitmap_insert_into_set_1 (bitmap_set_t set
, pre_expr expr
,
707 unsigned int val
, bool allow_constants
)
709 if (allow_constants
|| !value_id_constant_p (val
))
711 /* We specifically expect this and only this function to be able to
712 insert constants into a set. */
713 bitmap_set_bit (&set
->values
, val
);
714 bitmap_set_bit (&set
->expressions
, get_or_alloc_expression_id (expr
));
718 /* Insert an expression EXPR into a bitmapped set. */
721 bitmap_insert_into_set (bitmap_set_t set
, pre_expr expr
)
723 bitmap_insert_into_set_1 (set
, expr
, get_expr_value_id (expr
), false);
726 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
729 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
731 bitmap_copy (&dest
->expressions
, &orig
->expressions
);
732 bitmap_copy (&dest
->values
, &orig
->values
);
736 /* Free memory used up by SET. */
738 bitmap_set_free (bitmap_set_t set
)
740 bitmap_clear (&set
->expressions
);
741 bitmap_clear (&set
->values
);
/* Generate a topologically-ordered array of bitmap set SET.  */
748 sorted_array_from_bitmap_set (bitmap_set_t set
)
751 bitmap_iterator bi
, bj
;
752 vec
<pre_expr
> result
;
754 /* Pre-allocate enough space for the array. */
755 result
.create (bitmap_count_bits (&set
->expressions
));
757 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
/* The number of expressions having a given value is usually
   relatively small.  Thus, rather than making a vector of all
   the expressions and sorting it by value-id, we walk the values
   and check in the reverse mapping that tells us what expressions
   have a given value, to filter those in our set.  As a result,
   the expressions are inserted in value-id order, which means
   topological order.

   If this is somehow a significant loss for some cases, we can
   choose which set to walk based on the set size.  */
769 bitmap exprset
= value_expressions
[i
];
770 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
772 if (bitmap_bit_p (&set
->expressions
, j
))
773 result
.quick_push (expression_for_id (j
));
780 /* Perform bitmapped set operation DEST &= ORIG. */
783 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
791 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
793 bitmap_and_into (&dest
->values
, &orig
->values
);
794 bitmap_copy (&temp
, &dest
->expressions
);
795 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
797 pre_expr expr
= expression_for_id (i
);
798 unsigned int value_id
= get_expr_value_id (expr
);
799 if (!bitmap_bit_p (&dest
->values
, value_id
))
800 bitmap_clear_bit (&dest
->expressions
, i
);
802 bitmap_clear (&temp
);
806 /* Subtract all values and expressions contained in ORIG from DEST. */
809 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
811 bitmap_set_t result
= bitmap_set_new ();
815 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
818 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
820 pre_expr expr
= expression_for_id (i
);
821 unsigned int value_id
= get_expr_value_id (expr
);
822 bitmap_set_bit (&result
->values
, value_id
);
828 /* Subtract all the values in bitmap set B from bitmap set A. */
831 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
837 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
839 bitmap_copy (&temp
, &a
->expressions
);
840 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
842 pre_expr expr
= expression_for_id (i
);
843 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
844 bitmap_remove_from_set (a
, expr
);
846 bitmap_clear (&temp
);
850 /* Return true if bitmapped set SET contains the value VALUE_ID. */
853 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
855 if (value_id_constant_p (value_id
))
858 if (!set
|| bitmap_empty_p (&set
->expressions
))
861 return bitmap_bit_p (&set
->values
, value_id
);
865 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
867 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
870 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
873 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
880 if (value_id_constant_p (lookfor
))
883 if (!bitmap_set_contains_value (set
, lookfor
))
/* The number of expressions having a given value is usually
   significantly less than the total number of expressions in SET.
   Thus, rather than check, for each expression in SET, whether it
   has the value LOOKFOR, we walk the reverse mapping that tells us
   what expressions have a given value, and see if any of those
   expressions are in our set.  For large testcases, this is about
   5-10x faster than walking the bitmap.  If this is somehow a
   significant loss for some cases, we can choose which set to walk
   based on the set size.  */
895 exprset
= value_expressions
[lookfor
];
896 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
898 if (bitmap_clear_bit (&set
->expressions
, i
))
900 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
908 /* Return true if two bitmap sets are equal. */
911 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
913 return bitmap_equal_p (&a
->values
, &b
->values
);
916 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
917 and add it otherwise. */
920 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
922 unsigned int val
= get_expr_value_id (expr
);
924 if (bitmap_set_contains_value (set
, val
))
925 bitmap_set_replace_value (set
, val
, expr
);
927 bitmap_insert_into_set (set
, expr
);
930 /* Insert EXPR into SET if EXPR's value is not already present in
934 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
936 unsigned int val
= get_expr_value_id (expr
);
938 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
940 /* Constant values are always considered to be part of the set. */
941 if (value_id_constant_p (val
))
944 /* If the value membership changed, add the expression. */
945 if (bitmap_set_bit (&set
->values
, val
))
946 bitmap_set_bit (&set
->expressions
, expr
->id
);
949 /* Print out EXPR to outfile. */
952 print_pre_expr (FILE *outfile
, const pre_expr expr
)
957 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
960 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
965 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
966 fprintf (outfile
, "{%s,", get_tree_code_name (nary
->opcode
));
967 for (i
= 0; i
< nary
->length
; i
++)
969 print_generic_expr (outfile
, nary
->op
[i
], 0);
970 if (i
!= (unsigned) nary
->length
- 1)
971 fprintf (outfile
, ",");
973 fprintf (outfile
, "}");
979 vn_reference_op_t vro
;
981 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
982 fprintf (outfile
, "{");
984 ref
->operands
.iterate (i
, &vro
);
987 bool closebrace
= false;
988 if (vro
->opcode
!= SSA_NAME
989 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
991 fprintf (outfile
, "%s", get_tree_code_name (vro
->opcode
));
994 fprintf (outfile
, "<");
1000 print_generic_expr (outfile
, vro
->op0
, 0);
1003 fprintf (outfile
, ",");
1004 print_generic_expr (outfile
, vro
->op1
, 0);
1008 fprintf (outfile
, ",");
1009 print_generic_expr (outfile
, vro
->op2
, 0);
1013 fprintf (outfile
, ">");
1014 if (i
!= ref
->operands
.length () - 1)
1015 fprintf (outfile
, ",");
1017 fprintf (outfile
, "}");
1020 fprintf (outfile
, "@");
1021 print_generic_expr (outfile
, ref
->vuse
, 0);
1027 void debug_pre_expr (pre_expr
);
1029 /* Like print_pre_expr but always prints to stderr. */
1031 debug_pre_expr (pre_expr e
)
1033 print_pre_expr (stderr
, e
);
1034 fprintf (stderr
, "\n");
1037 /* Print out SET to OUTFILE. */
1040 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1041 const char *setname
, int blockindex
)
1043 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1050 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1052 const pre_expr expr
= expression_for_id (i
);
1055 fprintf (outfile
, ", ");
1057 print_pre_expr (outfile
, expr
);
1059 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1062 fprintf (outfile
, " }\n");
1065 void debug_bitmap_set (bitmap_set_t
);
1068 debug_bitmap_set (bitmap_set_t set
)
1070 print_bitmap_set (stderr
, set
, "debug", 0);
1073 void debug_bitmap_sets_for (basic_block
);
1076 debug_bitmap_sets_for (basic_block bb
)
1078 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1079 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1080 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1081 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1082 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1083 if (do_partial_partial
)
1084 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1085 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1088 /* Print out the expressions that have VAL to OUTFILE. */
1091 print_value_expressions (FILE *outfile
, unsigned int val
)
1093 bitmap set
= value_expressions
[val
];
1098 sprintf (s
, "%04d", val
);
1099 x
.expressions
= *set
;
1100 print_bitmap_set (outfile
, &x
, s
, 0);
1106 debug_value_expressions (unsigned int val
)
1108 print_value_expressions (stderr
, val
);
1111 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1115 get_or_alloc_expr_for_constant (tree constant
)
1117 unsigned int result_id
;
1118 unsigned int value_id
;
1119 struct pre_expr_d expr
;
1122 expr
.kind
= CONSTANT
;
1123 PRE_EXPR_CONSTANT (&expr
) = constant
;
1124 result_id
= lookup_expression_id (&expr
);
1126 return expression_for_id (result_id
);
1128 newexpr
= pre_expr_pool
.allocate ();
1129 newexpr
->kind
= CONSTANT
;
1130 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1131 alloc_expression_id (newexpr
);
1132 value_id
= get_or_alloc_constant_value_id (constant
);
1133 add_to_value (value_id
, newexpr
);
1137 /* Given a value id V, find the actual tree representing the constant
1138 value if there is one, and return it. Return NULL if we can't find
1142 get_constant_for_value_id (unsigned int v
)
1144 if (value_id_constant_p (v
))
1148 bitmap exprset
= value_expressions
[v
];
1150 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1152 pre_expr expr
= expression_for_id (i
);
1153 if (expr
->kind
== CONSTANT
)
1154 return PRE_EXPR_CONSTANT (expr
);
1160 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1161 Currently only supports constants and SSA_NAMES. */
1163 get_or_alloc_expr_for (tree t
)
1165 if (TREE_CODE (t
) == SSA_NAME
)
1166 return get_or_alloc_expr_for_name (t
);
1167 else if (is_gimple_min_invariant (t
))
1168 return get_or_alloc_expr_for_constant (t
);
/* More complex expressions can result from SCCVN expression
   simplification that inserts values for them.  As they all
   do not have VOPs they get handled by the nary ops struct.  */
1174 vn_nary_op_t result
;
1175 unsigned int result_id
;
1176 vn_nary_op_lookup (t
, &result
);
1179 pre_expr e
= pre_expr_pool
.allocate ();
1181 PRE_EXPR_NARY (e
) = result
;
1182 result_id
= lookup_expression_id (e
);
1185 pre_expr_pool
.remove (e
);
1186 e
= expression_for_id (result_id
);
1189 alloc_expression_id (e
);
1196 /* Return the folded version of T if T, when folded, is a gimple
1197 min_invariant. Otherwise, return T. */
1200 fully_constant_expression (pre_expr e
)
1208 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1209 switch (TREE_CODE_CLASS (nary
->opcode
))
1212 case tcc_comparison
:
1214 /* We have to go from trees to pre exprs to value ids to
1216 tree naryop0
= nary
->op
[0];
1217 tree naryop1
= nary
->op
[1];
1219 if (!is_gimple_min_invariant (naryop0
))
1221 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1222 unsigned int vrep0
= get_expr_value_id (rep0
);
1223 tree const0
= get_constant_for_value_id (vrep0
);
1225 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1227 if (!is_gimple_min_invariant (naryop1
))
1229 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1230 unsigned int vrep1
= get_expr_value_id (rep1
);
1231 tree const1
= get_constant_for_value_id (vrep1
);
1233 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1235 result
= fold_binary (nary
->opcode
, nary
->type
,
1237 if (result
&& is_gimple_min_invariant (result
))
1238 return get_or_alloc_expr_for_constant (result
);
1239 /* We might have simplified the expression to a
1240 SSA_NAME for example from x_1 * 1. But we cannot
1241 insert a PHI for x_1 unconditionally as x_1 might
1242 not be available readily. */
1246 if (nary
->opcode
!= REALPART_EXPR
1247 && nary
->opcode
!= IMAGPART_EXPR
1248 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1253 /* We have to go from trees to pre exprs to value ids to
1255 tree naryop0
= nary
->op
[0];
1256 tree const0
, result
;
1257 if (is_gimple_min_invariant (naryop0
))
1261 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1262 unsigned int vrep0
= get_expr_value_id (rep0
);
1263 const0
= get_constant_for_value_id (vrep0
);
1268 tree type1
= TREE_TYPE (nary
->op
[0]);
1269 const0
= fold_convert (type1
, const0
);
1270 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1272 if (result
&& is_gimple_min_invariant (result
))
1273 return get_or_alloc_expr_for_constant (result
);
1282 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1284 if ((folded
= fully_constant_vn_reference_p (ref
)))
1285 return get_or_alloc_expr_for_constant (folded
);
1294 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1295 it has the value it would have in BLOCK. Set *SAME_VALID to true
1296 in case the new vuse doesn't change the value id of the OPERANDS. */
1299 translate_vuse_through_block (vec
<vn_reference_op_s
> operands
,
1300 alias_set_type set
, tree type
, tree vuse
,
1301 basic_block phiblock
,
1302 basic_block block
, bool *same_valid
)
1304 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1311 if (gimple_bb (phi
) != phiblock
)
1314 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1316 /* Use the alias-oracle to find either the PHI node in this block,
1317 the first VUSE used in this block that is equivalent to vuse or
1318 the first VUSE which definition in this block kills the value. */
1319 if (gimple_code (phi
) == GIMPLE_PHI
)
1320 e
= find_edge (block
, phiblock
);
1321 else if (use_oracle
)
1322 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1324 vuse
= gimple_vuse (phi
);
1325 phi
= SSA_NAME_DEF_STMT (vuse
);
1326 if (gimple_bb (phi
) != phiblock
)
1328 if (gimple_code (phi
) == GIMPLE_PHI
)
1330 e
= find_edge (block
, phiblock
);
1341 bitmap visited
= NULL
;
1343 /* Try to find a vuse that dominates this phi node by skipping
1344 non-clobbering statements. */
1345 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false,
1348 BITMAP_FREE (visited
);
1354 /* If we didn't find any, the value ID can't stay the same,
1355 but return the translated vuse. */
1356 *same_valid
= false;
1357 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1359 /* ??? We would like to return vuse here as this is the canonical
1360 upmost vdef that this reference is associated with. But during
1361 insertion of the references into the hash tables we only ever
1362 directly insert with their direct gimple_vuse, hence returning
1363 something else would make us not find the other expression. */
1364 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1370 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1371 SET2. This is used to avoid making a set consisting of the union
1372 of PA_IN and ANTIC_IN during insert. */
1374 static inline pre_expr
1375 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1379 result
= bitmap_find_leader (set1
, val
);
1380 if (!result
&& set2
)
1381 result
= bitmap_find_leader (set2
, val
);
1385 /* Get the tree type for our PRE expression e. */
1388 get_expr_type (const pre_expr e
)
1393 return TREE_TYPE (PRE_EXPR_NAME (e
));
1395 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1397 return PRE_EXPR_REFERENCE (e
)->type
;
1399 return PRE_EXPR_NARY (e
)->type
;
1404 /* Get a representative SSA_NAME for a given expression.
1405 Since all of our sub-expressions are treated as values, we require
1406 them to be SSA_NAME's for simplicity.
1407 Prior versions of GVNPRE used to use "value handles" here, so that
1408 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1409 either case, the operands are really values (IE we do not expect
1410 them to be usable without finding leaders). */
1413 get_representative_for (const pre_expr e
)
1416 unsigned int value_id
= get_expr_value_id (e
);
1421 return PRE_EXPR_NAME (e
);
1423 return PRE_EXPR_CONSTANT (e
);
1427 /* Go through all of the expressions representing this value
1428 and pick out an SSA_NAME. */
1431 bitmap exprs
= value_expressions
[value_id
];
1432 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1434 pre_expr rep
= expression_for_id (i
);
1435 if (rep
->kind
== NAME
)
1436 return PRE_EXPR_NAME (rep
);
1437 else if (rep
->kind
== CONSTANT
)
1438 return PRE_EXPR_CONSTANT (rep
);
1444 /* If we reached here we couldn't find an SSA_NAME. This can
1445 happen when we've discovered a value that has never appeared in
1446 the program as set to an SSA_NAME, as the result of phi translation.
1448 ??? We should be able to re-use this when we insert the statement
1450 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1451 VN_INFO_GET (name
)->value_id
= value_id
;
1452 VN_INFO (name
)->valnum
= name
;
1453 /* ??? For now mark this SSA name for release by SCCVN. */
1454 VN_INFO (name
)->needs_insertion
= true;
1455 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1456 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1458 fprintf (dump_file
, "Created SSA_NAME representative ");
1459 print_generic_expr (dump_file
, name
, 0);
1460 fprintf (dump_file
, " for expression:");
1461 print_pre_expr (dump_file
, e
);
1462 fprintf (dump_file
, " (%04d)\n", value_id
);
1471 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1472 basic_block pred
, basic_block phiblock
);
1474 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1475 the phis in PRED. Return NULL if we can't find a leader for each part
1476 of the translated expression. */
1479 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1480 basic_block pred
, basic_block phiblock
)
1487 bool changed
= false;
1488 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1489 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1490 sizeof_vn_nary_op (nary
->length
));
1491 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1493 for (i
= 0; i
< newnary
->length
; i
++)
1495 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1499 pre_expr leader
, result
;
1500 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1501 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1502 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1503 if (result
&& result
!= leader
)
1505 tree name
= get_representative_for (result
);
1508 newnary
->op
[i
] = name
;
1513 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1519 unsigned int new_val_id
;
1521 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1526 if (result
&& is_gimple_min_invariant (result
))
1527 return get_or_alloc_expr_for_constant (result
);
1529 expr
= pre_expr_pool
.allocate ();
1534 PRE_EXPR_NARY (expr
) = nary
;
1535 constant
= fully_constant_expression (expr
);
1536 if (constant
!= expr
)
1539 new_val_id
= nary
->value_id
;
1540 get_or_alloc_expression_id (expr
);
1544 new_val_id
= get_next_value_id ();
1545 value_expressions
.safe_grow_cleared (get_max_value_id () + 1);
1546 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1550 result
, new_val_id
);
1551 PRE_EXPR_NARY (expr
) = nary
;
1552 constant
= fully_constant_expression (expr
);
1553 if (constant
!= expr
)
1555 get_or_alloc_expression_id (expr
);
1557 add_to_value (new_val_id
, expr
);
1565 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1566 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1567 tree vuse
= ref
->vuse
;
1568 tree newvuse
= vuse
;
1569 vec
<vn_reference_op_s
> newoperands
= vNULL
;
1570 bool changed
= false, same_valid
= true;
1572 vn_reference_op_t operand
;
1573 vn_reference_t newref
;
1575 for (i
= 0; operands
.iterate (i
, &operand
); i
++)
1580 tree type
= operand
->type
;
1581 vn_reference_op_s newop
= *operand
;
1582 op
[0] = operand
->op0
;
1583 op
[1] = operand
->op1
;
1584 op
[2] = operand
->op2
;
1585 for (n
= 0; n
< 3; ++n
)
1587 unsigned int op_val_id
;
1590 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1592 /* We can't possibly insert these. */
1594 && !is_gimple_min_invariant (op
[n
]))
1598 op_val_id
= VN_INFO (op
[n
])->value_id
;
1599 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1602 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1605 if (opresult
!= leader
)
1607 tree name
= get_representative_for (opresult
);
1610 changed
|= name
!= op
[n
];
1616 newoperands
.release ();
1621 if (!newoperands
.exists ())
1622 newoperands
= operands
.copy ();
1623 /* We may have changed from an SSA_NAME to a constant */
1624 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1625 newop
.opcode
= TREE_CODE (op
[0]);
1630 newoperands
[i
] = newop
;
1632 gcc_checking_assert (i
== operands
.length ());
1636 newvuse
= translate_vuse_through_block (newoperands
.exists ()
1637 ? newoperands
: operands
,
1638 ref
->set
, ref
->type
,
1639 vuse
, phiblock
, pred
,
1641 if (newvuse
== NULL_TREE
)
1643 newoperands
.release ();
1648 if (changed
|| newvuse
!= vuse
)
1650 unsigned int new_val_id
;
1653 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1655 newoperands
.exists ()
1656 ? newoperands
: operands
,
1659 newoperands
.release ();
1661 /* We can always insert constants, so if we have a partial
1662 redundant constant load of another type try to translate it
1663 to a constant of appropriate type. */
1664 if (result
&& is_gimple_min_invariant (result
))
1667 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1669 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1670 if (tem
&& !is_gimple_min_invariant (tem
))
1674 return get_or_alloc_expr_for_constant (tem
);
1677 /* If we'd have to convert things we would need to validate
1678 if we can insert the translated expression. So fail
1679 here for now - we cannot insert an alias with a different
1680 type in the VN tables either, as that would assert. */
1682 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1684 else if (!result
&& newref
1685 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1687 newoperands
.release ();
1691 expr
= pre_expr_pool
.allocate ();
1692 expr
->kind
= REFERENCE
;
1697 PRE_EXPR_REFERENCE (expr
) = newref
;
1698 constant
= fully_constant_expression (expr
);
1699 if (constant
!= expr
)
1702 new_val_id
= newref
->value_id
;
1703 get_or_alloc_expression_id (expr
);
1707 if (changed
|| !same_valid
)
1709 new_val_id
= get_next_value_id ();
1710 value_expressions
.safe_grow_cleared
1711 (get_max_value_id () + 1);
1714 new_val_id
= ref
->value_id
;
1715 if (!newoperands
.exists ())
1716 newoperands
= operands
.copy ();
1717 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1720 result
, new_val_id
);
1721 newoperands
= vNULL
;
1722 PRE_EXPR_REFERENCE (expr
) = newref
;
1723 constant
= fully_constant_expression (expr
);
1724 if (constant
!= expr
)
1726 get_or_alloc_expression_id (expr
);
1728 add_to_value (new_val_id
, expr
);
1730 newoperands
.release ();
1737 tree name
= PRE_EXPR_NAME (expr
);
1738 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
1739 /* If the SSA name is defined by a PHI node in this block,
1741 if (gimple_code (def_stmt
) == GIMPLE_PHI
1742 && gimple_bb (def_stmt
) == phiblock
)
1744 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1745 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1747 /* Handle constant. */
1748 if (is_gimple_min_invariant (def
))
1749 return get_or_alloc_expr_for_constant (def
);
1751 return get_or_alloc_expr_for_name (def
);
1753 /* Otherwise return it unchanged - it will get removed if its
1754 value is not available in PREDs AVAIL_OUT set of expressions
1755 by the subtraction of TMP_GEN. */
1764 /* Wrapper around phi_translate_1 providing caching functionality. */
1767 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1768 basic_block pred
, basic_block phiblock
)
1770 expr_pred_trans_t slot
= NULL
;
1776 /* Constants contain no values that need translation. */
1777 if (expr
->kind
== CONSTANT
)
1780 if (value_id_constant_p (get_expr_value_id (expr
)))
1783 /* Don't add translations of NAMEs as those are cheap to translate. */
1784 if (expr
->kind
!= NAME
)
1786 if (phi_trans_add (&slot
, expr
, pred
))
1788 /* Store NULL for the value we want to return in the case of
1794 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1801 /* Remove failed translations again, they cause insert
1802 iteration to not pick up new opportunities reliably. */
1803 phi_translate_table
->remove_elt_with_hash (slot
, slot
->hashcode
);
1810 /* For each expression in SET, translate the values through phi nodes
1811 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1812 expressions in DEST. */
1815 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1816 basic_block phiblock
)
1818 vec
<pre_expr
> exprs
;
1822 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1824 bitmap_set_copy (dest
, set
);
1828 exprs
= sorted_array_from_bitmap_set (set
);
1829 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
1831 pre_expr translated
;
1832 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1836 /* We might end up with multiple expressions from SET being
1837 translated to the same value. In this case we do not want
1838 to retain the NARY or REFERENCE expression but prefer a NAME
1839 which would be the leader. */
1840 if (translated
->kind
== NAME
)
1841 bitmap_value_replace_in_set (dest
, translated
);
1843 bitmap_value_insert_into_set (dest
, translated
);
1848 /* Find the leader for a value (i.e., the name representing that
1849 value) in a given set, and return it. Return NULL if no leader
1853 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1855 if (value_id_constant_p (val
))
1859 bitmap exprset
= value_expressions
[val
];
1861 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1863 pre_expr expr
= expression_for_id (i
);
1864 if (expr
->kind
== CONSTANT
)
1868 if (bitmap_set_contains_value (set
, val
))
/* Rather than walk the entire bitmap of expressions, and see
   whether any of them has the value we are looking for, we look
   at the reverse mapping, which tells us the set of expressions
   that have a given value (IE value->expressions with that
   value) and see if any of those expressions are in our set.
   The number of expressions per value is usually significantly
   less than the number of expressions in the set.  In fact, for
   large testcases, doing it this way is roughly 5-10x faster
   than walking the bitmap.
   If this is somehow a significant loss for some cases, we can
   choose which set to walk based on which set is smaller.  */
1883 bitmap exprset
= value_expressions
[val
];
1885 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1886 return expression_for_id (i
);
1891 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1892 BLOCK by seeing if it is not killed in the block. Note that we are
1893 only determining whether there is a store that kills it. Because
1894 of the order in which clean iterates over values, we are guaranteed
1895 that altered operands will have caused us to be eliminated from the
1896 ANTIC_IN set already. */
1899 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1901 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1902 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1904 gimple_stmt_iterator gsi
;
1905 unsigned id
= get_expression_id (expr
);
1912 /* Lookup a previously calculated result. */
1913 if (EXPR_DIES (block
)
1914 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1915 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
/* A memory expression {e, VUSE} dies in the block if there is a
   statement that may clobber e.  If, starting the statement walk from
   the top of the basic block, a statement uses VUSE, there can be no
   kill in between that use and the original statement that loaded
   {e, VUSE}, so we can stop walking.  */
1922 ref
.base
= NULL_TREE
;
1923 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1925 tree def_vuse
, def_vdef
;
1926 def
= gsi_stmt (gsi
);
1927 def_vuse
= gimple_vuse (def
);
1928 def_vdef
= gimple_vdef (def
);
1930 /* Not a memory statement. */
1934 /* Not a may-def. */
1937 /* A load with the same VUSE, we're done. */
1938 if (def_vuse
== vuse
)
1944 /* Init ref only if we really need it. */
1945 if (ref
.base
== NULL_TREE
1946 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1952 /* If the statement may clobber expr, it dies. */
1953 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1960 /* Remember the result. */
1961 if (!EXPR_DIES (block
))
1962 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1963 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1965 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1971 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1972 contains its value-id. */
1975 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1977 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1979 unsigned int value_id
= VN_INFO (op
)->value_id
;
1980 if (!(bitmap_set_contains_value (set1
, value_id
)
1981 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1987 /* Determine if the expression EXPR is valid in SET1 U SET2.
1988 ONLY SET2 CAN BE NULL.
1989 This means that we have a leader for each part of the expression
1990 (if it consists of values), or the expression is an SSA_NAME.
1991 For loads/calls, we also see if the vuse is killed in this block. */
1994 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
)
1999 /* By construction all NAMEs are available. Non-available
2000 NAMEs are removed by subtracting TMP_GEN from the sets. */
2005 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2006 for (i
= 0; i
< nary
->length
; i
++)
2007 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
2014 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2015 vn_reference_op_t vro
;
2018 FOR_EACH_VEC_ELT (ref
->operands
, i
, vro
)
2020 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
2021 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
2022 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */
2039 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
)
2041 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set1
);
2045 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2047 if (!valid_in_sets (set1
, set2
, expr
))
2048 bitmap_remove_from_set (set1
, expr
);
2053 /* Clean the set of expressions that are no longer valid in SET. This
2054 means expressions that are made up of values we have no leaders for
2058 clean (bitmap_set_t set
)
2060 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set
);
2064 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2066 if (!valid_in_sets (set
, NULL
, expr
))
2067 bitmap_remove_from_set (set
, expr
);
2072 /* Clean the set of expressions that are no longer valid in SET because
2073 they are clobbered in BLOCK or because they trap and may not be executed. */
2076 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2081 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2083 pre_expr expr
= expression_for_id (i
);
2084 if (expr
->kind
== REFERENCE
)
2086 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2089 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2090 if (!gimple_nop_p (def_stmt
)
2091 && ((gimple_bb (def_stmt
) != block
2092 && !dominated_by_p (CDI_DOMINATORS
,
2093 block
, gimple_bb (def_stmt
)))
2094 || (gimple_bb (def_stmt
) == block
2095 && value_dies_in_block_x (expr
, block
))))
2096 bitmap_remove_from_set (set
, expr
);
2099 else if (expr
->kind
== NARY
)
2101 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2102 /* If the NARY may trap make sure the block does not contain
2103 a possible exit point.
2104 ??? This is overly conservative if we translate AVAIL_OUT
2105 as the available expression might be after the exit point. */
2106 if (BB_MAY_NOTRETURN (block
)
2107 && vn_nary_may_trap (nary
))
2108 bitmap_remove_from_set (set
, expr
);
2113 static sbitmap has_abnormal_preds
;
2115 /* List of blocks that may have changed during ANTIC computation and
2116 thus need to be iterated over. */
2118 static sbitmap changed_blocks
;
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
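/* Illustrative pseudo-code for one round of the computation driven by
   the equations above (the real code is compute_antic_aux and
   compute_antic below):

     ANTIC_OUT = no successor  -> { }
		 one successor -> phi_translate (ANTIC_IN[succ])
		 otherwise     -> intersection of ANTIC_IN over all succs
     ANTIC_IN  = clean ((ANTIC_OUT - TMP_GEN) U (EXP_GEN - TMP_GEN))

   Whenever ANTIC_IN of a block changes, its predecessors are queued
   again, and the whole computation iterates to a fixpoint.  */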
2131 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2133 bool changed
= false;
2134 bitmap_set_t S
, old
, ANTIC_OUT
;
2140 old
= ANTIC_OUT
= S
= NULL
;
2141 BB_VISITED (block
) = 1;
2143 /* If any edges from predecessors are abnormal, antic_in is empty,
2145 if (block_has_abnormal_pred_edge
)
2146 goto maybe_dump_sets
;
2148 old
= ANTIC_IN (block
);
2149 ANTIC_OUT
= bitmap_set_new ();
2151 /* If the block has no successors, ANTIC_OUT is empty. */
2152 if (EDGE_COUNT (block
->succs
) == 0)
2154 /* If we have one successor, we could have some phi nodes to
2155 translate through. */
2156 else if (single_succ_p (block
))
2158 basic_block succ_bb
= single_succ (block
);
2159 gcc_assert (BB_VISITED (succ_bb
));
2160 phi_translate_set (ANTIC_OUT
, ANTIC_IN (succ_bb
), block
, succ_bb
);
2162 /* If we have multiple successors, we take the intersection of all of
2163 them. Note that in the case of loop exit phi nodes, we may have
2164 phis to translate through. */
2168 basic_block bprime
, first
= NULL
;
2170 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2171 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2174 && BB_VISITED (e
->dest
))
2176 else if (BB_VISITED (e
->dest
))
2177 worklist
.quick_push (e
->dest
);
2180 /* Of multiple successors we have to have visited one already
2181 which is guaranteed by iteration order. */
2182 gcc_assert (first
!= NULL
);
2184 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2186 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2188 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2190 bitmap_set_t tmp
= bitmap_set_new ();
2191 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2192 bitmap_set_and (ANTIC_OUT
, tmp
);
2193 bitmap_set_free (tmp
);
2196 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2200 /* Prune expressions that are clobbered in block and thus become
2201 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2202 prune_clobbered_mems (ANTIC_OUT
, block
);
2204 /* Generate ANTIC_OUT - TMP_GEN. */
2205 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2207 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2208 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2211 /* Then union in the ANTIC_OUT - TMP_GEN values,
2212 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2213 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2214 bitmap_value_insert_into_set (ANTIC_IN (block
),
2215 expression_for_id (bii
));
2217 clean (ANTIC_IN (block
));
2219 if (!bitmap_set_equal (old
, ANTIC_IN (block
)))
2222 bitmap_set_bit (changed_blocks
, block
->index
);
2223 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2224 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2227 bitmap_clear_bit (changed_blocks
, block
->index
);
2230 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2233 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2235 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2239 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2242 bitmap_set_free (old
);
2244 bitmap_set_free (S
);
2246 bitmap_set_free (ANTIC_OUT
);
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])
*/
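/* Illustrative summary of how this differs from the ANTIC computation
   above: over multiple successors PA_OUT is a value-wise union rather
   than an intersection, e.g. for successors S1 and S2

     PA_OUT = (ANTIC_IN[S1] U PA_IN[S1]) U (ANTIC_IN[S2] U PA_IN[S2])

   and DFS back edges are not translated through, so that recurrences
   such as IVs do not keep generating new values forever.  */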
2263 compute_partial_antic_aux (basic_block block
,
2264 bool block_has_abnormal_pred_edge
)
2266 bool changed
= false;
2267 bitmap_set_t old_PA_IN
;
2268 bitmap_set_t PA_OUT
;
2271 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2273 old_PA_IN
= PA_OUT
= NULL
;
2275 /* If any edges from predecessors are abnormal, antic_in is empty,
2277 if (block_has_abnormal_pred_edge
)
2278 goto maybe_dump_sets
;
2280 /* If there are too many partially anticipatable values in the
2281 block, phi_translate_set can take an exponential time: stop
2282 before the translation starts. */
2284 && single_succ_p (block
)
2285 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2286 goto maybe_dump_sets
;
2288 old_PA_IN
= PA_IN (block
);
2289 PA_OUT
= bitmap_set_new ();
2291 /* If the block has no successors, ANTIC_OUT is empty. */
2292 if (EDGE_COUNT (block
->succs
) == 0)
2294 /* If we have one successor, we could have some phi nodes to
2295 translate through. Note that we can't phi translate across DFS
2296 back edges in partial antic, because it uses a union operation on
2297 the successors. For recurrences like IV's, we will end up
2298 generating a new value in the set on each go around (i + 3 (VH.1)
2299 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2300 else if (single_succ_p (block
))
2302 basic_block succ
= single_succ (block
);
2303 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2304 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2306 /* If we have multiple successors, we take the union of all of
2313 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2314 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2316 if (e
->flags
& EDGE_DFS_BACK
)
2318 worklist
.quick_push (e
->dest
);
2320 if (worklist
.length () > 0)
2322 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2327 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2328 bitmap_value_insert_into_set (PA_OUT
,
2329 expression_for_id (i
));
2330 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2332 bitmap_set_t pa_in
= bitmap_set_new ();
2333 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2334 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2335 bitmap_value_insert_into_set (PA_OUT
,
2336 expression_for_id (i
));
2337 bitmap_set_free (pa_in
);
2340 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2341 bitmap_value_insert_into_set (PA_OUT
,
2342 expression_for_id (i
));
  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block));
2365 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2368 bitmap_set_bit (changed_blocks
, block
->index
);
2369 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2370 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2373 bitmap_clear_bit (changed_blocks
, block
->index
);
2376 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2379 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2381 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2384 bitmap_set_free (old_PA_IN
);
2386 bitmap_set_free (PA_OUT
);
2390 /* Compute ANTIC and partial ANTIC sets. */
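/* The fixpoint below iterates the per-block equations implemented by
   compute_antic_aux and compute_partial_antic_aux; sketched here for
   reference only (see those functions for the authoritative form):

     ANTIC_OUT[b] = intersection over succs s of phi_translate (ANTIC_IN[s])
     ANTIC_IN[b]  = clean (ANTIC_OUT[b] U EXP_GEN[b] - TMP_GEN[b])

   Blocks whose ANTIC_IN changed get their predecessors re-queued via
   changed_blocks, and the loop repeats until no set changes.  */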
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
2400 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2401 We pre-build the map of blocks with incoming abnormal edges here. */
2402 has_abnormal_preds
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
2403 bitmap_clear (has_abnormal_preds
);
2405 FOR_ALL_BB_FN (block
, cfun
)
2410 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2412 e
->flags
&= ~EDGE_DFS_BACK
;
2413 if (e
->flags
& EDGE_ABNORMAL
)
2415 bitmap_set_bit (has_abnormal_preds
, block
->index
);
2420 BB_VISITED (block
) = 0;
2422 /* While we are here, give empty ANTIC_IN sets to each block. */
2423 ANTIC_IN (block
) = bitmap_set_new ();
2424 PA_IN (block
) = bitmap_set_new ();
2427 /* At the exit block we anticipate nothing. */
2428 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun
)) = 1;
2430 changed_blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
) + 1);
2431 bitmap_ones (changed_blocks
);
2434 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2435 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2436 /* ??? We need to clear our PHI translation cache here as the
2437 ANTIC sets shrink and we restrict valid translations to
2438 those having operands with leaders in ANTIC. Same below
2439 for PA ANTIC computation. */
2442 for (i
= postorder_num
- 1; i
>= 0; i
--)
2444 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2446 basic_block block
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
2447 changed
|= compute_antic_aux (block
,
2448 bitmap_bit_p (has_abnormal_preds
,
2452 /* Theoretically possible, but *highly* unlikely. */
2453 gcc_checking_assert (num_iterations
< 500);
2456 statistics_histogram_event (cfun
, "compute_antic iterations",
2459 if (do_partial_partial
)
2461 bitmap_ones (changed_blocks
);
2462 mark_dfs_back_edges ();
2467 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2468 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2471 for (i
= postorder_num
- 1 ; i
>= 0; i
--)
2473 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2475 basic_block block
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
2477 |= compute_partial_antic_aux (block
,
2478 bitmap_bit_p (has_abnormal_preds
,
2482 /* Theoretically possible, but *highly* unlikely. */
2483 gcc_checking_assert (num_iterations
< 500);
2485 statistics_histogram_event (cfun
, "compute_partial_antic iterations",
2488 sbitmap_free (has_abnormal_preds
);
2489 sbitmap_free (changed_blocks
);
2493 /* Inserted expressions are placed onto this worklist, which is used
2494 for performing quick dead code elimination of insertions we made
2495 that didn't turn out to be necessary. */
2496 static bitmap inserted_exprs
;
2498 /* The actual worker for create_component_ref_by_pieces. */
2501 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2502 unsigned int *operand
, gimple_seq
*stmts
)
2504 vn_reference_op_t currop
= &ref
->operands
[*operand
];
2507 switch (currop
->opcode
)
2511 tree folded
, sc
= NULL_TREE
;
2512 unsigned int nargs
= 0;
2514 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2517 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2522 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2526 args
= XNEWVEC (tree
, ref
->operands
.length () - 1);
2527 while (*operand
< ref
->operands
.length ())
2529 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2535 folded
= build_call_array (currop
->type
,
2536 (TREE_CODE (fn
) == FUNCTION_DECL
2537 ? build_fold_addr_expr (fn
) : fn
),
2539 if (currop
->with_bounds
)
2540 CALL_WITH_BOUNDS_P (folded
) = true;
2543 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2549 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2553 tree offset
= currop
->op0
;
2554 if (TREE_CODE (baseop
) == ADDR_EXPR
2555 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2559 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2562 offset
= int_const_binop (PLUS_EXPR
, offset
,
2563 build_int_cst (TREE_TYPE (offset
),
2565 baseop
= build_fold_addr_expr (base
);
2567 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2570 case TARGET_MEM_REF
:
2572 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2573 vn_reference_op_t nextop
= &ref
->operands
[++*operand
];
2574 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2580 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2586 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2590 return build5 (TARGET_MEM_REF
, currop
->type
,
2591 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2597 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2603 case VIEW_CONVERT_EXPR
:
2605 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2609 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2612 case WITH_SIZE_EXPR
:
2614 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2618 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2621 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2626 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2630 tree op1
= currop
->op0
;
2631 tree op2
= currop
->op1
;
2632 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1 of the reference op.  */
2638 case ARRAY_RANGE_REF
:
2642 tree genop1
= currop
->op0
;
2643 tree genop2
= currop
->op1
;
2644 tree genop3
= currop
->op2
;
2645 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2649 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2654 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2655 /* Drop zero minimum index if redundant. */
2656 if (integer_zerop (genop2
)
2658 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2662 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2669 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
	  /* We can't always put a size in units of the element alignment
	     here as the element alignment may not be visible.  See
	     PR43783.  Simply drop the element size for constant-sized
	     elements.  */
2674 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2678 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2679 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2680 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2685 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2692 tree genop2
= currop
->op1
;
2693 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
	/* op1 should be a FIELD_DECL; FIELD_DECLs are represented by
	   themselves.  */
2700 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2704 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2709 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for
   the COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no-no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */
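/* Illustrative example (not part of the original comment; names are made
   up): for a load of the form a_1->f the reference operands describe,
   outermost first, a COMPONENT_REF of FIELD_DECL f over a MEM_REF based on
   a_1.  The worker recurses to materialize the MEM_REF base, using
   find_or_generate_expression for leaf SSA operands such as a_1, and then
   wraps the result in the COMPONENT_REF, so the caller gets back one
   access tree rather than a chain of temporaries.  */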
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
2750 /* Find a simple leader for an expression, or generate one using
2751 create_expression_by_pieces from a NARY expression for the value.
2752 BLOCK is the basic_block we are looking for leaders in.
2753 OP is the tree expression to find a leader for or generate.
2754 Returns the leader or NULL_TREE on failure. */
2757 find_or_generate_expression (basic_block block
, tree op
, gimple_seq
*stmts
)
2759 pre_expr expr
= get_or_alloc_expr_for (op
);
2760 unsigned int lookfor
= get_expr_value_id (expr
);
2761 pre_expr leader
= bitmap_find_leader (AVAIL_OUT (block
), lookfor
);
2764 if (leader
->kind
== NAME
)
2765 return PRE_EXPR_NAME (leader
);
2766 else if (leader
->kind
== CONSTANT
)
2767 return PRE_EXPR_CONSTANT (leader
);
2773 /* It must be a complex expression, so generate it recursively. Note
2774 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2775 where the insert algorithm fails to insert a required expression. */
2776 bitmap exprset
= value_expressions
[lookfor
];
2779 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
2781 pre_expr temp
= expression_for_id (i
);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs, which eventually re-materialize
	 their operand values.  */
2785 if (temp
->kind
== NARY
)
2786 return create_expression_by_pieces (block
, temp
, stmts
,
2787 get_expr_type (expr
));
2794 #define NECESSARY GF_PLF_1
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (i.e. there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */
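/* Worked example (illustrative only; the SSA name "pretmp_5" is made up):
   if EXPR stands for the value of a_1 + b_2 and no leader exists for it in
   BLOCK, the NARY case below looks up (or recursively generates) leaders
   for a_1 and b_2, folds the operation, and appends something like

     pretmp_5 = a_1 + b_2;

   to STMTS, registering pretmp_5 with EXPR's value-id in NEW_SETS and
   AVAIL_OUT so later pieces and later iterations of insert can reuse it.  */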
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
2827 /* We may hit the NAME/CONSTANT case if we have to convert types
2828 that value numbering saw through. */
2830 folded
= PRE_EXPR_NAME (expr
);
2833 folded
= PRE_EXPR_CONSTANT (expr
);
2837 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2838 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2845 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2846 tree
*genop
= XALLOCAVEC (tree
, nary
->length
);
2848 for (i
= 0; i
< nary
->length
; ++i
)
2850 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2853 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2854 may have conversions stripped. */
2855 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2858 genop
[i
] = gimple_convert (&forced_stmts
,
2859 nary
->type
, genop
[i
]);
2861 genop
[i
] = gimple_convert (&forced_stmts
,
2862 sizetype
, genop
[i
]);
2865 genop
[i
] = gimple_convert (&forced_stmts
,
2866 TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2868 if (nary
->opcode
== CONSTRUCTOR
)
2870 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
2871 for (i
= 0; i
< nary
->length
; ++i
)
2872 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2873 folded
= build_constructor (nary
->type
, elts
);
2877 switch (nary
->length
)
2880 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2884 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2885 genop
[0], genop
[1]);
2888 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2889 genop
[0], genop
[1], genop
[2]);
2901 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2902 folded
= fold_convert (exprtype
, folded
);
  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
2908 gimple_seq tem
= NULL
;
2909 folded
= force_gimple_operand (unshare_expr (folded
), &tem
,
2911 gimple_seq_add_seq_without_update (&forced_stmts
, tem
);
  /* If we have any intermediate expressions, add them to the value sets
     and chain them into the instruction stream.  */
2917 gsi
= gsi_start (forced_stmts
);
2918 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2920 gimple stmt
= gsi_stmt (gsi
);
2921 tree forcedname
= gimple_get_lhs (stmt
);
2924 if (TREE_CODE (forcedname
) == SSA_NAME
)
2926 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2927 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2928 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2929 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2930 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2931 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2932 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2935 gimple_set_vuse (stmt
, BB_LIVE_VOP_ON_EXIT (block
));
2936 gimple_set_modified (stmt
, true);
2938 gimple_seq_add_seq (stmts
, forced_stmts
);
2941 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2942 newstmt
= gimple_build_assign (name
, folded
);
2943 gimple_set_vuse (newstmt
, BB_LIVE_VOP_ON_EXIT (block
));
2944 gimple_set_modified (newstmt
, true);
2945 gimple_set_plf (newstmt
, NECESSARY
, false);
2947 gimple_seq_add_stmt (stmts
, newstmt
);
2948 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2950 /* Fold the last statement. */
2951 gsi
= gsi_last (*stmts
);
2952 if (fold_stmt_inplace (&gsi
))
2953 update_stmt (gsi_stmt (gsi
));
  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
2960 value_id
= get_expr_value_id (expr
);
2961 VN_INFO_GET (name
)->value_id
= value_id
;
2962 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2963 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2964 VN_INFO (name
)->valnum
= name
;
2965 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2966 nameexpr
= get_or_alloc_expr_for_name (name
);
2967 add_to_value (value_id
, nameexpr
);
2968 if (NEW_SETS (block
))
2969 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2970 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2972 pre_stats
.insertions
++;
2973 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2975 fprintf (dump_file
, "Inserted ");
2976 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
2977 fprintf (dump_file
, " in predecessor %d (%04d)\n",
2978 block
->index
, value_id
);
2985 /* Insert the to-be-made-available values of expression EXPRNUM for each
2986 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2987 merge the result with a phi node, given the same value number as
2988 NODE. Return true if we have inserted new stuff. */
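/* Sketch of the mechanics (added for illustration): AVAIL is indexed by
   edge destination index, so for a block with two predecessors P0 and P1

     avail[0] = leader of the translated expression on the P0 edge
     avail[1] = leader (or the expression still to be built) on the P1 edge

   Any entry that is neither a NAME nor a CONSTANT is materialized on its
   incoming edge via create_expression_by_pieces, and the resulting names
   or constants become the arguments of the "prephitmp" PHI built below.  */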
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  unsigned int val = get_expr_value_id (expr);
  bool insertions = false;
  tree type = get_expr_type (expr);
3007 /* Make sure we aren't creating an induction variable. */
3008 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
3010 bool firstinsideloop
= false;
3011 bool secondinsideloop
= false;
3012 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3013 EDGE_PRED (block
, 0)->src
);
3014 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3015 EDGE_PRED (block
, 1)->src
);
3016 /* Induction variables only have one edge inside the loop. */
3017 if ((firstinsideloop
^ secondinsideloop
)
3018 && expr
->kind
!= REFERENCE
)
3020 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3021 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3026 /* Make the necessary insertions. */
3027 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3029 gimple_seq stmts
= NULL
;
3032 eprime
= avail
[pred
->dest_idx
];
3034 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3036 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3038 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3039 gsi_insert_seq_on_edge (pred
, stmts
);
	  /* We cannot insert a PHI node if we failed to insert
	     the expression in one of the predecessors.  */
3047 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (builtexpr
);
3050 else if (eprime
->kind
== CONSTANT
)
3052 /* Constants may not have the right type, fold_convert
3053 should give us back a constant with the right type. */
3054 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3055 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3057 tree builtexpr
= fold_convert (type
, constant
);
3058 if (!is_gimple_min_invariant (builtexpr
))
3060 tree forcedexpr
= force_gimple_operand (builtexpr
,
3063 if (!is_gimple_min_invariant (forcedexpr
))
3065 if (forcedexpr
!= builtexpr
)
3067 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3068 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3072 gimple_stmt_iterator gsi
;
3073 gsi
= gsi_start (stmts
);
3074 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3076 gimple stmt
= gsi_stmt (gsi
);
3077 tree lhs
= gimple_get_lhs (stmt
);
3078 if (TREE_CODE (lhs
) == SSA_NAME
)
3079 bitmap_set_bit (inserted_exprs
,
3080 SSA_NAME_VERSION (lhs
));
3081 gimple_set_plf (stmt
, NECESSARY
, false);
3083 gsi_insert_seq_on_edge (pred
, stmts
);
3085 avail
[pred
->dest_idx
]
3086 = get_or_alloc_expr_for_name (forcedexpr
);
3090 avail
[pred
->dest_idx
]
3091 = get_or_alloc_expr_for_constant (builtexpr
);
3094 else if (eprime
->kind
== NAME
)
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
3100 tree name
= PRE_EXPR_NAME (eprime
);
3101 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3105 builtexpr
= fold_convert (type
, name
);
3106 forcedexpr
= force_gimple_operand (builtexpr
,
3110 if (forcedexpr
!= name
)
3112 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3113 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3118 gimple_stmt_iterator gsi
;
3119 gsi
= gsi_start (stmts
);
3120 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3122 gimple stmt
= gsi_stmt (gsi
);
3123 tree lhs
= gimple_get_lhs (stmt
);
3124 if (TREE_CODE (lhs
) == SSA_NAME
)
3125 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3126 gimple_set_plf (stmt
, NECESSARY
, false);
3128 gsi_insert_seq_on_edge (pred
, stmts
);
3130 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (forcedexpr
);
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
3138 if (nophi
&& insertions
)
3140 else if (nophi
&& !insertions
)
3143 /* Now build a phi for the new variable. */
3144 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3145 phi
= create_phi_node (temp
, block
);
3147 gimple_set_plf (phi
, NECESSARY
, false);
3148 VN_INFO_GET (temp
)->value_id
= val
;
3149 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3150 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3151 VN_INFO (temp
)->valnum
= temp
;
3152 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3153 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3155 pre_expr ae
= avail
[pred
->dest_idx
];
3156 gcc_assert (get_expr_type (ae
) == type
3157 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3158 if (ae
->kind
== CONSTANT
)
3159 add_phi_arg (phi
, unshare_expr (PRE_EXPR_CONSTANT (ae
)),
3160 pred
, UNKNOWN_LOCATION
);
3162 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3165 newphi
= get_or_alloc_expr_for_name (temp
);
3166 add_to_value (val
, newphi
);
  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it does not exist in AVAIL_OUT, we want it
     added here.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.  */
3182 bitmap_insert_into_set (PHI_GEN (block
), newphi
);
3183 bitmap_value_replace_in_set (AVAIL_OUT (block
),
3185 bitmap_insert_into_set (NEW_SETS (block
),
  /* If we insert a PHI node for a conversion of another PHI node
     in the same basic-block, try to preserve range information.
     This is important so that followup loop passes receive optimal
     number-of-iterations analysis results.  See PR61743.  */
3192 if (expr
->kind
== NARY
3193 && CONVERT_EXPR_CODE_P (expr
->u
.nary
->opcode
)
3194 && TREE_CODE (expr
->u
.nary
->op
[0]) == SSA_NAME
3195 && gimple_bb (SSA_NAME_DEF_STMT (expr
->u
.nary
->op
[0])) == block
3196 && INTEGRAL_TYPE_P (type
)
3197 && INTEGRAL_TYPE_P (TREE_TYPE (expr
->u
.nary
->op
[0]))
3198 && (TYPE_PRECISION (type
)
3199 >= TYPE_PRECISION (TREE_TYPE (expr
->u
.nary
->op
[0])))
3200 && SSA_NAME_RANGE_INFO (expr
->u
.nary
->op
[0]))
3203 if (get_range_info (expr
->u
.nary
->op
[0], &min
, &max
) == VR_RANGE
3204 && !wi::neg_p (min
, SIGNED
)
3205 && !wi::neg_p (max
, SIGNED
))
3206 /* Just handle extension and sign-changes of all-positive ranges. */
3207 set_range_info (temp
,
3208 SSA_NAME_RANGE_TYPE (expr
->u
.nary
->op
[0]),
3209 wide_int_storage::from (min
, TYPE_PRECISION (type
),
3211 wide_int_storage::from (max
, TYPE_PRECISION (type
),
3215 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3217 fprintf (dump_file
, "Created phi ");
3218 print_gimple_stmt (dump_file
, phi
, 0, 0);
3219 fprintf (dump_file
, " in block %d (%04d)\n", block
->index
, val
);
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
   2a. Iterate over the ANTIC expressions for the block to see if
       any of them are partially redundant.
   2b. If so, insert them into the necessary predecessors to make
       the expression fully redundant.
   2c. Insert a new PHI merging the values of the predecessors.
   2d. Insert the new PHI, and the new expressions, into the
       NEW_SETS set.
   3.  Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_partial_insertion.  */
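/* Classic shape this machinery targets (illustrative example; block and SSA
   names are made up):

     bb2:  x_1 = a_1 + b_2;  goto bb4;      <- computes the value
     bb3:  goto bb4;                        <- does not
     bb4:  y_3 = a_1 + b_2;                 <- partially redundant

   do_regular_insertion materializes a_1 + b_2 on the bb3->bb4 edge, builds
   prephitmp_4 = PHI <x_1(bb2), pretmp_5(bb3)> in bb4, and the later
   elimination walk then replaces y_3's computation with prephitmp_4.  */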
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  auto_vec<pre_expr> avail;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));
3257 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3259 if (expr
->kind
== NARY
3260 || expr
->kind
== REFERENCE
)
3263 bool by_some
= false;
3264 bool cant_insert
= false;
3265 bool all_same
= true;
3266 pre_expr first_s
= NULL
;
3269 pre_expr eprime
= NULL
;
3271 pre_expr edoubleprime
= NULL
;
3272 bool do_insertion
= false;
3274 val
= get_expr_value_id (expr
);
3275 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3277 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3279 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3281 fprintf (dump_file
, "Found fully redundant value: ");
3282 print_pre_expr (dump_file
, expr
);
3283 fprintf (dump_file
, "\n");
3288 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3290 unsigned int vprime
;
3292 /* We should never run insertion for the exit block
3293 and so not come across fake pred edges. */
3294 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3296 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3299 /* eprime will generally only be NULL if the
3300 value of the expression, translated
3301 through the PHI for this predecessor, is
3302 undefined. If that is the case, we can't
3303 make the expression fully redundant,
3304 because its value is undefined along a
3305 predecessor path. We can thus break out
3306 early because it doesn't matter what the
3307 rest of the results are. */
3310 avail
[pred
->dest_idx
] = NULL
;
3315 eprime
= fully_constant_expression (eprime
);
3316 vprime
= get_expr_value_id (eprime
);
3317 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3319 if (edoubleprime
== NULL
)
3321 avail
[pred
->dest_idx
] = eprime
;
3326 avail
[pred
->dest_idx
] = edoubleprime
;
3328 /* We want to perform insertions to remove a redundancy on
3329 a path in the CFG we want to optimize for speed. */
3330 if (optimize_edge_for_speed_p (pred
))
3331 do_insertion
= true;
3332 if (first_s
== NULL
)
3333 first_s
= edoubleprime
;
3334 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3338 /* If we can insert it, it's not the same value
3339 already existing along every predecessor, and
3340 it's defined by some predecessor, it is
3341 partially redundant. */
3342 if (!cant_insert
&& !all_same
&& by_some
)
3346 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3348 fprintf (dump_file
, "Skipping partial redundancy for "
3350 print_pre_expr (dump_file
, expr
);
3351 fprintf (dump_file
, " (%04d), no redundancy on to be "
3352 "optimized for speed edge\n", val
);
3355 else if (dbg_cnt (treepre_insert
))
3357 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3359 fprintf (dump_file
, "Found partial redundancy for "
3361 print_pre_expr (dump_file
, expr
);
3362 fprintf (dump_file
, " (%04d)\n",
3363 get_expr_value_id (expr
));
3365 if (insert_into_preds_of_block (block
,
3366 get_expression_id (expr
),
3371 /* If all edges produce the same value and that value is
3372 an invariant, then the PHI has the same value on all
3373 edges. Note this. */
3374 else if (!cant_insert
&& all_same
)
3376 gcc_assert (edoubleprime
->kind
== CONSTANT
3377 || edoubleprime
->kind
== NAME
);
3379 tree temp
= make_temp_ssa_name (get_expr_type (expr
),
3382 = gimple_build_assign (temp
,
3383 edoubleprime
->kind
== CONSTANT
?
3384 PRE_EXPR_CONSTANT (edoubleprime
) :
3385 PRE_EXPR_NAME (edoubleprime
));
3386 gimple_stmt_iterator gsi
= gsi_after_labels (block
);
3387 gsi_insert_before (&gsi
, assign
, GSI_NEW_STMT
);
3389 gimple_set_plf (assign
, NECESSARY
, false);
3390 VN_INFO_GET (temp
)->value_id
= val
;
3391 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3392 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3393 VN_INFO (temp
)->valnum
= temp
;
3394 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3395 pre_expr newe
= get_or_alloc_expr_for_name (temp
);
3396 add_to_value (val
, newe
);
3397 bitmap_value_replace_in_set (AVAIL_OUT (block
), newe
);
3398 bitmap_insert_into_set (NEW_SETS (block
), newe
);
/* Perform insertion for partially anticipatable expressions.  There
   is only one case in which we will perform insertion for these: when
   the expression is partially anticipatable, and fully available.
   In that case, we know that putting it earlier will enable us to
   remove the later computation.  */
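/* Illustrative example (block and SSA names are made up): suppose both
   predecessors of BLOCK already compute a_1 + b_2, but below BLOCK the
   expression is re-computed only on some paths, so it is merely partially
   anticipated here.  Since the value is fully available, inserting a PHI
   that merges the two existing values lets the later, conditional
   computation be removed.  That is the single situation handled below.  */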
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  auto_vec<pre_expr> avail;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));
3427 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3429 if (expr
->kind
== NARY
3430 || expr
->kind
== REFERENCE
)
3434 bool cant_insert
= false;
3437 pre_expr eprime
= NULL
;
3440 val
= get_expr_value_id (expr
);
3441 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3443 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3446 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3448 unsigned int vprime
;
3449 pre_expr edoubleprime
;
3451 /* We should never run insertion for the exit block
3452 and so not come across fake pred edges. */
3453 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3455 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3459 /* eprime will generally only be NULL if the
3460 value of the expression, translated
3461 through the PHI for this predecessor, is
3462 undefined. If that is the case, we can't
3463 make the expression fully redundant,
3464 because its value is undefined along a
3465 predecessor path. We can thus break out
3466 early because it doesn't matter what the
3467 rest of the results are. */
3470 avail
[pred
->dest_idx
] = NULL
;
3475 eprime
= fully_constant_expression (eprime
);
3476 vprime
= get_expr_value_id (eprime
);
3477 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3478 avail
[pred
->dest_idx
] = edoubleprime
;
3479 if (edoubleprime
== NULL
)
3486 /* If we can insert it, it's not the same value
3487 already existing along every predecessor, and
3488 it's defined by some predecessor, it is
3489 partially redundant. */
3490 if (!cant_insert
&& by_all
)
3493 bool do_insertion
= false;
3495 /* Insert only if we can remove a later expression on a path
3496 that we want to optimize for speed.
3497 The phi node that we will be inserting in BLOCK is not free,
3498 and inserting it for the sake of !optimize_for_speed successor
3499 may cause regressions on the speed path. */
3500 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3502 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
)
3503 || bitmap_set_contains_value (ANTIC_IN (succ
->dest
), val
))
3505 if (optimize_edge_for_speed_p (succ
))
3506 do_insertion
= true;
3512 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3514 fprintf (dump_file
, "Skipping partial partial redundancy "
3516 print_pre_expr (dump_file
, expr
);
3517 fprintf (dump_file
, " (%04d), not (partially) anticipated "
3518 "on any to be optimized for speed edges\n", val
);
3521 else if (dbg_cnt (treepre_insert
))
3523 pre_stats
.pa_insert
++;
3524 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3526 fprintf (dump_file
, "Found partial partial redundancy "
3528 print_pre_expr (dump_file
, expr
);
3529 fprintf (dump_file
, " (%04d)\n",
3530 get_expr_value_id (expr
));
3532 if (insert_into_preds_of_block (block
,
3533 get_expression_id (expr
),
3546 insert_aux (basic_block block
)
3549 bool new_stuff
= false;
3554 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3559 bitmap_set_t newset
= NEW_SETS (dom
);
      /* Note that we need to value_replace both NEW_SETS and
	 AVAIL_OUT.  In both sets the value may currently be represented
	 by some non-simple expression that we want to replace with this
	 one.  */
3566 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3568 pre_expr expr
= expression_for_id (i
);
3569 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3570 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3573 if (!single_pred_p (block
))
3575 new_stuff
|= do_regular_insertion (block
, dom
);
3576 if (do_partial_partial
)
3577 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3581 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3583 son
= next_dom_son (CDI_DOMINATORS
, son
))
3585 new_stuff
|= insert_aux (son
);
3591 /* Perform insertion of partially redundant values. */
3596 bool new_stuff
= true;
3598 int num_iterations
= 0;
3600 FOR_ALL_BB_FN (bb
, cfun
)
3601 NEW_SETS (bb
) = bitmap_set_new ();
3606 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3607 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3608 new_stuff
= insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
      /* Clear the NEW sets before the next iteration.  We have already
	 fully propagated their contents.  */
3613 FOR_ALL_BB_FN (bb
, cfun
)
3614 bitmap_set_free (NEW_SETS (bb
));
3616 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
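/* A short sketch of what the equations mean in practice (illustration only;
   block and SSA names are made up): walking the dominator tree top-down,
   each block starts from its dominator's AVAIL_OUT and then adds every PHI
   result and every SSA name it defines, e.g.

     AVAIL_OUT (bb2) = AVAIL_OUT (bb1) U { x_1, y_2 }

   when bb1 immediately dominates bb2 and bb2 defines x_1 and y_2.  */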
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
3639 /* We pretend that default definitions are defined in the entry block.
3640 This includes function arguments and the static chain decl. */
3641 for (i
= 1; i
< num_ssa_names
; ++i
)
3643 tree name
= ssa_name (i
);
3646 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3647 || has_zero_uses (name
)
3648 || virtual_operand_p (name
))
3651 e
= get_or_alloc_expr_for_name (name
);
3652 add_to_value (get_expr_value_id (e
), e
);
3653 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), e
);
3654 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3658 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3660 print_bitmap_set (dump_file
, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3661 "tmp_gen", ENTRY_BLOCK
);
3662 print_bitmap_set (dump_file
, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3663 "avail_out", ENTRY_BLOCK
);
3666 /* Allocate the worklist. */
3667 worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
));
3669 /* Seed the algorithm by putting the dominator children of the entry
3670 block on the worklist. */
3671 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3673 son
= next_dom_son (CDI_DOMINATORS
, son
))
3674 worklist
[sp
++] = son
;
3676 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3677 = ssa_default_def (cfun
, gimple_vop (cfun
));
3679 /* Loop until the worklist is empty. */
3685 /* Pick a block from the worklist. */
3686 block
= worklist
[--sp
];
3688 /* Initially, the set of available values in BLOCK is that of
3689 its immediate dominator. */
3690 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3693 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3694 BB_LIVE_VOP_ON_EXIT (block
) = BB_LIVE_VOP_ON_EXIT (dom
);
3697 /* Generate values for PHI nodes. */
3698 for (gphi_iterator gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
);
3701 tree result
= gimple_phi_result (gsi
.phi ());
3703 /* We have no need for virtual phis, as they don't represent
3704 actual computations. */
3705 if (virtual_operand_p (result
))
3707 BB_LIVE_VOP_ON_EXIT (block
) = result
;
3711 pre_expr e
= get_or_alloc_expr_for_name (result
);
3712 add_to_value (get_expr_value_id (e
), e
);
3713 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3714 bitmap_insert_into_set (PHI_GEN (block
), e
);
3717 BB_MAY_NOTRETURN (block
) = 0;
3719 /* Now compute value numbers and populate value sets with all
3720 the expressions computed in BLOCK. */
3721 for (gimple_stmt_iterator gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
);
3727 stmt
= gsi_stmt (gsi
);
	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
3733 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbid hoisting possibly trapping expressions
		 before it.  */
3739 int flags
= gimple_call_flags (stmt
);
3740 if (!(flags
& ECF_CONST
)
3741 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3742 BB_MAY_NOTRETURN (block
) = 1;
3745 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3747 pre_expr e
= get_or_alloc_expr_for_name (op
);
3749 add_to_value (get_expr_value_id (e
), e
);
3750 bitmap_insert_into_set (TMP_GEN (block
), e
);
3751 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3754 if (gimple_vdef (stmt
))
3755 BB_LIVE_VOP_ON_EXIT (block
) = gimple_vdef (stmt
);
3757 if (gimple_has_side_effects (stmt
)
3758 || stmt_could_throw_p (stmt
)
3759 || is_gimple_debug (stmt
))
3762 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3764 if (ssa_undefined_value_p (op
))
3766 pre_expr e
= get_or_alloc_expr_for_name (op
);
3767 bitmap_value_insert_into_set (EXP_GEN (block
), e
);
3770 switch (gimple_code (stmt
))
3778 vn_reference_s ref1
;
3779 pre_expr result
= NULL
;
3781 /* We can value number only calls to real functions. */
3782 if (gimple_call_internal_p (stmt
))
3785 vn_reference_lookup_call (as_a
<gcall
*> (stmt
), &ref
, &ref1
);
	      /* If the value of the call is not invalidated in
		 this block until it is computed, add the expression
		 to EXP_GEN.  */
3792 if (!gimple_vuse (stmt
)
3794 (SSA_NAME_DEF_STMT (gimple_vuse (stmt
))) == GIMPLE_PHI
3795 || gimple_bb (SSA_NAME_DEF_STMT
3796 (gimple_vuse (stmt
))) != block
)
3798 result
= pre_expr_pool
.allocate ();
3799 result
->kind
= REFERENCE
;
3801 PRE_EXPR_REFERENCE (result
) = ref
;
3803 get_or_alloc_expression_id (result
);
3804 add_to_value (get_expr_value_id (result
), result
);
3805 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3812 pre_expr result
= NULL
;
3813 switch (vn_get_stmt_kind (stmt
))
3817 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3820 /* COND_EXPR and VEC_COND_EXPR are awkward in
3821 that they contain an embedded complex expression.
3822 Don't even try to shove those through PRE. */
3823 if (code
== COND_EXPR
3824 || code
== VEC_COND_EXPR
)
3827 vn_nary_op_lookup_stmt (stmt
, &nary
);
		/* If the NARY traps and there was a preceding
		   point in the block that might not return, avoid
		   adding the nary to EXP_GEN.  */
3834 if (BB_MAY_NOTRETURN (block
)
3835 && vn_nary_may_trap (nary
))
3838 result
= pre_expr_pool
.allocate ();
3839 result
->kind
= NARY
;
3841 PRE_EXPR_NARY (result
) = nary
;
3848 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
		/* If the value of the reference is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
3857 if (gimple_vuse (stmt
))
3861 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3862 while (!gimple_nop_p (def_stmt
)
3863 && gimple_code (def_stmt
) != GIMPLE_PHI
3864 && gimple_bb (def_stmt
) == block
)
3866 if (stmt_may_clobber_ref_p
3867 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3873 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3879 result
= pre_expr_pool
.allocate ();
3880 result
->kind
= REFERENCE
;
3882 PRE_EXPR_REFERENCE (result
) = ref
;
3890 get_or_alloc_expression_id (result
);
3891 add_to_value (get_expr_value_id (result
), result
);
3892 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3900 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3902 print_bitmap_set (dump_file
, EXP_GEN (block
),
3903 "exp_gen", block
->index
);
3904 print_bitmap_set (dump_file
, PHI_GEN (block
),
3905 "phi_gen", block
->index
);
3906 print_bitmap_set (dump_file
, TMP_GEN (block
),
3907 "tmp_gen", block
->index
);
3908 print_bitmap_set (dump_file
, AVAIL_OUT (block
),
3909 "avail_out", block
->index
);
3912 /* Put the dominator children of BLOCK on the worklist of blocks
3913 to compute available sets for. */
3914 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3916 son
= next_dom_son (CDI_DOMINATORS
, son
))
3917 worklist
[sp
++] = son
;
3924 /* Local state for the eliminate domwalk. */
3925 static vec
<gimple
> el_to_remove
;
3926 static vec
<gimple
> el_to_fixup
;
3927 static unsigned int el_todo
;
3928 static vec
<tree
> el_avail
;
3929 static vec
<tree
> el_avail_stack
;
3931 /* Return a leader for OP that is available at the current point of the
3932 eliminate domwalk. */
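/* Note (added for clarity): el_avail is indexed by SSA_NAME_VERSION of the
   *value number*, not of OP itself, so all names sharing a value map to the
   same slot; after_dom_children pops el_avail_stack to undo these entries
   when leaving a block's dominator subtree.  */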
3935 eliminate_avail (tree op
)
3937 tree valnum
= VN_INFO (op
)->valnum
;
3938 if (TREE_CODE (valnum
) == SSA_NAME
)
3940 if (SSA_NAME_IS_DEFAULT_DEF (valnum
))
3942 if (el_avail
.length () > SSA_NAME_VERSION (valnum
))
3943 return el_avail
[SSA_NAME_VERSION (valnum
)];
3945 else if (is_gimple_min_invariant (valnum
))
3950 /* At the current point of the eliminate domwalk make OP available. */
3953 eliminate_push_avail (tree op
)
3955 tree valnum
= VN_INFO (op
)->valnum
;
3956 if (TREE_CODE (valnum
) == SSA_NAME
)
3958 if (el_avail
.length () <= SSA_NAME_VERSION (valnum
))
3959 el_avail
.safe_grow_cleared (SSA_NAME_VERSION (valnum
) + 1);
3961 if (el_avail
[SSA_NAME_VERSION (valnum
)])
3962 pushop
= el_avail
[SSA_NAME_VERSION (valnum
)];
3963 el_avail_stack
.safe_push (pushop
);
3964 el_avail
[SSA_NAME_VERSION (valnum
)] = op
;
3968 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3969 the leader for the expression if insertion was successful. */
3972 eliminate_insert (gimple_stmt_iterator
*gsi
, tree val
)
3974 tree expr
= vn_get_expr_for (val
);
3975 if (!CONVERT_EXPR_P (expr
)
3976 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
)
3979 tree op
= TREE_OPERAND (expr
, 0);
3980 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (op
) : op
;
3984 tree res
= make_temp_ssa_name (TREE_TYPE (val
), NULL
, "pretmp");
3985 gassign
*tem
= gimple_build_assign (res
,
3986 fold_build1 (TREE_CODE (expr
),
3987 TREE_TYPE (expr
), leader
));
3988 gsi_insert_before (gsi
, tem
, GSI_SAME_STMT
);
3989 VN_INFO_GET (res
)->valnum
= val
;
3991 if (TREE_CODE (leader
) == SSA_NAME
)
3992 gimple_set_plf (SSA_NAME_DEF_STMT (leader
), NECESSARY
, true);
3994 pre_stats
.insertions
++;
3995 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3997 fprintf (dump_file
, "Inserted ");
3998 print_gimple_stmt (dump_file
, tem
, 0, 0);
4004 class eliminate_dom_walker
: public dom_walker
4007 eliminate_dom_walker (cdi_direction direction
, bool do_pre_
)
4008 : dom_walker (direction
), do_pre (do_pre_
) {}
4010 virtual void before_dom_children (basic_block
);
4011 virtual void after_dom_children (basic_block
);
4016 /* Perform elimination for the basic-block B during the domwalk. */
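/* Illustrative example (SSA names are made up): if value numbering assigned
   x_1 and y_2 the same value number and x_1 was made available by a
   dominating block, then for the statement y_2 = a_3 + b_4 the lookup below
   finds sprime = x_1 and either propagates x_1 into all uses of y_2
   (queueing the stmt in el_to_remove) or rewrites the statement into
   y_2 = x_1.  */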
eliminate_dom_walker::before_dom_children (basic_block b)
{
4022 el_avail_stack
.safe_push (NULL_TREE
);
4024 /* ??? If we do nothing for unreachable blocks then this will confuse
4025 tailmerging. Eventually we can reduce its reliance on SCCVN now
4026 that we fully copy/constant-propagate (most) things. */
4028 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4030 gphi
*phi
= gsi
.phi ();
4031 tree res
= PHI_RESULT (phi
);
4033 if (virtual_operand_p (res
))
4039 tree sprime
= eliminate_avail (res
);
4043 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4045 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4046 print_generic_expr (dump_file
, res
, 0);
4047 fprintf (dump_file
, " with ");
4048 print_generic_expr (dump_file
, sprime
, 0);
4049 fprintf (dump_file
, "\n");
4052 /* If we inserted this PHI node ourself, it's not an elimination. */
4054 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4057 pre_stats
.eliminations
++;
	  /* If we will propagate into all uses, don't bother to do
	     anything.  */
4061 if (may_propagate_copy (res
, sprime
))
4063 /* Mark the PHI for removal. */
4064 el_to_remove
.safe_push (phi
);
4069 remove_phi_node (&gsi
, false);
4072 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4073 && TREE_CODE (sprime
) == SSA_NAME
)
4074 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4076 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4077 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4078 gimple stmt
= gimple_build_assign (res
, sprime
);
4079 /* ??? It cannot yet be necessary (DOM walk). */
4080 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4082 gimple_stmt_iterator gsi2
= gsi_after_labels (b
);
4083 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4087 eliminate_push_avail (res
);
4091 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
);
4095 tree sprime
= NULL_TREE
;
4096 gimple stmt
= gsi_stmt (gsi
);
4097 tree lhs
= gimple_get_lhs (stmt
);
4098 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
4099 && !gimple_has_volatile_ops (stmt
)
	  /* See PR43491.  Do not replace a global register variable when
	     it is the RHS of an assignment.  Do replace local register
	     variables since gcc does not guarantee a local variable will
	     be allocated in a register.
	     ??? The fix isn't effective here.  This should instead
	     be ensured by not value-numbering them the same but treating
	     them like volatiles?  */
4107 && !(gimple_assign_single_p (stmt
)
4108 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == VAR_DECL
4109 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
))
4110 && is_global_var (gimple_assign_rhs1 (stmt
)))))
4112 sprime
= eliminate_avail (lhs
);
	  /* If there is no existing usable leader but SCCVN thinks
	     it has an expression it wants to use as replacement,
	     insert that.  */
4118 tree val
= VN_INFO (lhs
)->valnum
;
4120 && TREE_CODE (val
) == SSA_NAME
4121 && VN_INFO (val
)->needs_insertion
4122 && VN_INFO (val
)->expr
!= NULL_TREE
4123 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4124 eliminate_push_avail (sprime
);
	  /* If this now constitutes a copy, duplicate points-to
	     and range info appropriately.  This is especially
	     important for inserted code.  See tree-ssa-copy.c
	     for similar code.  */
4132 && TREE_CODE (sprime
) == SSA_NAME
)
4134 basic_block sprime_b
= gimple_bb (SSA_NAME_DEF_STMT (sprime
));
4135 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
4136 && SSA_NAME_PTR_INFO (lhs
)
4137 && !SSA_NAME_PTR_INFO (sprime
))
4139 duplicate_ssa_name_ptr_info (sprime
,
4140 SSA_NAME_PTR_INFO (lhs
));
4142 mark_ptr_info_alignment_unknown
4143 (SSA_NAME_PTR_INFO (sprime
));
4145 else if (!POINTER_TYPE_P (TREE_TYPE (lhs
))
4146 && SSA_NAME_RANGE_INFO (lhs
)
4147 && !SSA_NAME_RANGE_INFO (sprime
)
4149 duplicate_ssa_name_range_info (sprime
,
4150 SSA_NAME_RANGE_TYPE (lhs
),
4151 SSA_NAME_RANGE_INFO (lhs
));
	  /* Inhibit the use of an inserted PHI on a loop header when
	     the address of the memory reference is a simple induction
	     variable.  In other cases the vectorizer won't do anything
	     anyway (either it's loop invariant or a complicated
	     expression).  */
4160 && TREE_CODE (sprime
) == SSA_NAME
4162 && flag_tree_loop_vectorize
4163 && loop_outer (b
->loop_father
)
4164 && has_zero_uses (sprime
)
4165 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))
4166 && gimple_assign_load_p (stmt
))
4168 gimple def_stmt
= SSA_NAME_DEF_STMT (sprime
);
4169 basic_block def_bb
= gimple_bb (def_stmt
);
4170 if (gimple_code (def_stmt
) == GIMPLE_PHI
4171 && b
->loop_father
->header
== def_bb
)
4176 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
4179 def_bb
= gimple_bb (SSA_NAME_DEF_STMT (op
));
4181 && flow_bb_inside_loop_p (b
->loop_father
, def_bb
)
4182 && simple_iv (b
->loop_father
,
4183 b
->loop_father
, op
, &iv
, true))
4191 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4193 fprintf (dump_file
, "Not replacing ");
4194 print_gimple_expr (dump_file
, stmt
, 0, 0);
4195 fprintf (dump_file
, " with ");
4196 print_generic_expr (dump_file
, sprime
, 0);
4197 fprintf (dump_file
, " which would add a loop"
4198 " carried dependence to loop %d\n",
4199 b
->loop_father
->num
);
4201 /* Don't keep sprime available. */
	  /* If we can propagate the value computed for LHS into
	     all uses, don't bother doing anything with this stmt.  */
4211 if (may_propagate_copy (lhs
, sprime
))
4213 /* Mark it for removal. */
4214 el_to_remove
.safe_push (stmt
);
4216 /* ??? Don't count copy/constant propagations. */
4217 if (gimple_assign_single_p (stmt
)
4218 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4219 || gimple_assign_rhs1 (stmt
) == sprime
))
4222 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4224 fprintf (dump_file
, "Replaced ");
4225 print_gimple_expr (dump_file
, stmt
, 0, 0);
4226 fprintf (dump_file
, " with ");
4227 print_generic_expr (dump_file
, sprime
, 0);
4228 fprintf (dump_file
, " in all uses of ");
4229 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4232 pre_stats
.eliminations
++;
4236 /* If this is an assignment from our leader (which
4237 happens in the case the value-number is a constant)
4238 then there is nothing to do. */
4239 if (gimple_assign_single_p (stmt
)
4240 && sprime
== gimple_assign_rhs1 (stmt
))
4243 /* Else replace its RHS. */
4244 bool can_make_abnormal_goto
4245 = is_gimple_call (stmt
)
4246 && stmt_can_make_abnormal_goto (stmt
);
4248 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4250 fprintf (dump_file
, "Replaced ");
4251 print_gimple_expr (dump_file
, stmt
, 0, 0);
4252 fprintf (dump_file
, " with ");
4253 print_generic_expr (dump_file
, sprime
, 0);
4254 fprintf (dump_file
, " in ");
4255 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4258 if (TREE_CODE (sprime
) == SSA_NAME
)
4259 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4262 pre_stats
.eliminations
++;
4263 gimple orig_stmt
= stmt
;
4264 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4265 TREE_TYPE (sprime
)))
4266 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4267 tree vdef
= gimple_vdef (stmt
);
4268 tree vuse
= gimple_vuse (stmt
);
4269 propagate_tree_value_into_stmt (&gsi
, sprime
);
4270 stmt
= gsi_stmt (gsi
);
4272 if (vdef
!= gimple_vdef (stmt
))
4273 VN_INFO (vdef
)->valnum
= vuse
;
4275 /* If we removed EH side-effects from the statement, clean
4276 its EH information. */
4277 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4279 bitmap_set_bit (need_eh_cleanup
,
4280 gimple_bb (stmt
)->index
);
4281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4282 fprintf (dump_file
, " Removed EH side-effects.\n");
4285 /* Likewise for AB side-effects. */
4286 if (can_make_abnormal_goto
4287 && !stmt_can_make_abnormal_goto (stmt
))
4289 bitmap_set_bit (need_ab_cleanup
,
4290 gimple_bb (stmt
)->index
);
4291 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4292 fprintf (dump_file
, " Removed AB side-effects.\n");
      /* If the statement is a scalar store, see if the expression
	 has the same value number as its rhs.  If so, the store is
	 redundant.  */
4302 if (gimple_assign_single_p (stmt
)
4303 && !gimple_has_volatile_ops (stmt
)
4304 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4305 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4306 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))))
4309 tree rhs
= gimple_assign_rhs1 (stmt
);
4310 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4311 gimple_vuse (stmt
), VN_WALK
, NULL
);
4312 if (TREE_CODE (rhs
) == SSA_NAME
)
4313 rhs
= VN_INFO (rhs
)->valnum
;
4315 && operand_equal_p (val
, rhs
, 0))
4317 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4319 fprintf (dump_file
, "Deleted redundant store ");
4320 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4323 /* Queue stmt for removal. */
4324 el_to_remove
.safe_push (stmt
);
4329 bool can_make_abnormal_goto
= stmt_can_make_abnormal_goto (stmt
);
4330 bool was_noreturn
= (is_gimple_call (stmt
)
4331 && gimple_call_noreturn_p (stmt
));
4332 tree vdef
= gimple_vdef (stmt
);
4333 tree vuse
= gimple_vuse (stmt
);
      /* If we didn't replace the whole stmt (or propagate the result
	 into all uses), replace all uses on this stmt with their
	 leaders.  */
4338 use_operand_p use_p
;
4340 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4342 tree use
= USE_FROM_PTR (use_p
);
4343 /* ??? The call code above leaves stmt operands un-updated. */
4344 if (TREE_CODE (use
) != SSA_NAME
)
4346 tree sprime
= eliminate_avail (use
);
4347 if (sprime
&& sprime
!= use
4348 && may_propagate_copy (use
, sprime
)
4349 /* We substitute into debug stmts to avoid excessive
4350 debug temporaries created by removed stmts, but we need
4351 to avoid doing so for inserted sprimes as we never want
4352 to create debug temporaries for them. */
4354 || TREE_CODE (sprime
) != SSA_NAME
4355 || !is_gimple_debug (stmt
)
4356 || !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))))
4358 propagate_value (use_p
, sprime
);
4359 gimple_set_modified (stmt
, true);
4360 if (TREE_CODE (sprime
) == SSA_NAME
4361 && !is_gimple_debug (stmt
))
4362 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4367 /* Visit indirect calls and turn them into direct calls if
4368 possible using the devirtualization machinery. */
4369 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
4371 tree fn
= gimple_call_fn (call_stmt
);
4373 && flag_devirtualize
4374 && virtual_method_call_p (fn
))
4376 tree otr_type
= obj_type_ref_class (fn
);
4378 ipa_polymorphic_call_context
context (current_function_decl
, fn
, stmt
, &instance
);
4381 context
.get_dynamic_type (instance
, OBJ_TYPE_REF_OBJECT (fn
), otr_type
, stmt
);
4383 vec
<cgraph_node
*>targets
4384 = possible_polymorphic_call_targets (obj_type_ref_class (fn
),
4386 (OBJ_TYPE_REF_TOKEN (fn
)),
4389 if (dump_enabled_p ())
4390 dump_possible_polymorphic_call_targets (dump_file
,
4391 obj_type_ref_class (fn
),
4393 (OBJ_TYPE_REF_TOKEN (fn
)),
4395 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4398 if (targets
.length () == 1)
4399 fn
= targets
[0]->decl
;
4401 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4402 if (dump_enabled_p ())
4404 location_t loc
= gimple_location_safe (stmt
);
4405 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
4406 "converting indirect call to "
4408 cgraph_node::get (fn
)->name ());
4410 gimple_call_set_fndecl (call_stmt
, fn
);
4411 maybe_remove_unused_call_args (cfun
, call_stmt
);
4412 gimple_set_modified (stmt
, true);
4417 if (gimple_modified_p (stmt
))
	  /* If a formerly non-invariant ADDR_EXPR has been turned into an
	     invariant one, it was on a separate stmt.  */
4421 if (gimple_assign_single_p (stmt
)
4422 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
4423 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
4424 gimple old_stmt
= stmt
;
4425 if (is_gimple_call (stmt
))
	      /* ??? Only fold calls inplace for now, this may create new
		 SSA names which in turn will confuse free_scc_vn SSA name
		 bookkeeping.  */
4430 fold_stmt_inplace (&gsi
);
4431 /* When changing a call into a noreturn call, cfg cleanup
4432 is needed to fix up the noreturn call. */
4433 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4434 el_to_fixup
.safe_push (stmt
);
4439 stmt
= gsi_stmt (gsi
);
4440 if ((gimple_code (stmt
) == GIMPLE_COND
4441 && (gimple_cond_true_p (as_a
<gcond
*> (stmt
))
4442 || gimple_cond_false_p (as_a
<gcond
*> (stmt
))))
4443 || (gimple_code (stmt
) == GIMPLE_SWITCH
4444 && TREE_CODE (gimple_switch_index (
4445 as_a
<gswitch
*> (stmt
)))
4447 el_todo
|= TODO_cleanup_cfg
;
4449 /* If we removed EH side-effects from the statement, clean
4450 its EH information. */
4451 if (maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
))
4453 bitmap_set_bit (need_eh_cleanup
,
4454 gimple_bb (stmt
)->index
);
4455 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4456 fprintf (dump_file
, " Removed EH side-effects.\n");
4458 /* Likewise for AB side-effects. */
4459 if (can_make_abnormal_goto
4460 && !stmt_can_make_abnormal_goto (stmt
))
4462 bitmap_set_bit (need_ab_cleanup
,
4463 gimple_bb (stmt
)->index
);
4464 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4465 fprintf (dump_file
, " Removed AB side-effects.\n");
4468 if (vdef
!= gimple_vdef (stmt
))
4469 VN_INFO (vdef
)->valnum
= vuse
;
4472 /* Make new values available - for fully redundant LHS we
4473 continue with the next stmt above and skip this. */
4475 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_DEF
)
4476 eliminate_push_avail (DEF_FROM_PTR (defp
));
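
  /* PHI arguments on the outgoing edges are use sites that the statement
     walk above never visits, so they receive the same leader propagation in
     a separate loop.  */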
  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (arg);
          if (sprime && may_propagate_copy (arg, sprime))
            {
              propagate_value (use_p, sprime);
              if (TREE_CODE (sprime) == SSA_NAME)
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
            }
        }
    }
}
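
/* el_avail and el_avail_stack together behave like a scoped table keyed by
   value number: eliminate_push_avail records either the new leader or the
   leader it displaced, and a NULL_TREE marker (pushed when a block is
   entered, not shown here) delimits the entries belonging to one block.
   Unwinding to that marker below restores the leaders that were visible in
   the dominating block.  */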

/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
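
/* eliminate () below is the driver: it runs the dominator walk above and
   then performs the work that had to be postponed until after the walk,
   removing the statements queued in el_to_remove and fixing up the calls
   queued in el_to_fixup, and it returns a mask of TODO_* flags for the pass
   manager.  */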

/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_fixup.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
                        do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
        lhs = gimple_phi_result (stmt);
      else
        lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
          && TREE_CODE (lhs) == SSA_NAME)
        bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el_to_fixup.is_empty ())
    {
      stmt = el_to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }
  el_to_fixup.release ();

  return el_todo;
}
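
/* need_eh_cleanup and need_ab_cleanup are bitmaps of basic-block indices
   collected during elimination; purging the now-dead EH and abnormal edges
   of those blocks can change the CFG, hence TODO_cleanup_cfg is returned
   whenever either purge was performed.  */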

/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}

/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
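
/* A sketch of the cleanup below: seed a worklist with every inserted SSA
   name whose defining statement is already marked NECESSARY, propagate
   NECESSARY backwards through operands and PHI arguments, and finally
   remove each insertion that never became necessary.  */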

/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}
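
/* What follows is per-invocation setup and teardown plus the pass-manager
   boilerplate that exposes PRE and FRE as gimple passes.  */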

/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table <expr_pred_trans_d> (5110);
  expression_to_id = new hash_table <pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}

/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
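
/* Pass descriptors and opt_pass wrappers: one pass_data/gimple_opt_pass pair
   for PRE and one for FRE.  */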

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
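
/* The PRE driver proper: value-number the function with SCCVN, compute the
   per-block AVAIL sets, compute ANTIC and insert expressions (skipped for
   very large CFGs, see the comment below), eliminate redundancies, and
   finally remove insertions that turned out to be unnecessary.  */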

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* eliminate () folds statements which might (should not...) end up
     not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
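
/* FRE reuses the elimination machinery above but never runs the insertion
   phase; eliminate (false) therefore only removes computations that are
   fully redundant with an already available value.  */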

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);