/* SSA-PRE for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "plugin-api.h"
#include "symbol-summary.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
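/* As an illustrative sketch (the SSA names below are hypothetical,
   not taken from any testcase), the insertion and elimination steps
   transform a partially redundant computation like

       if (cond)
         x_1 = a_2 + b_3;    <- a_2 + b_3 is AVAIL in this predecessor only
       y_4 = a_2 + b_3;      <- partially redundant

   into

       if (cond)
         x_1 = a_2 + b_3;
       else
         pretmp_5 = a_2 + b_3;   <- inserted where ANTIC but not AVAIL
       x_6 = PHI <x_1, pretmp_5>
       y_4 = x_6;                <- now fully redundant, eliminated

   This only illustrates the dataflow conditions described above; the
   exact temporaries and PHIs are produced by the insertion code later
   in this file.  */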
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
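/* For illustration (the names and ids below are hypothetical): given

       a_1 = b_2 + c_3;
       d_4 = b_2 + c_3;

   value numbering gives both right-hand sides the same value number,
   represented by the first SSA_NAME computing it (a_1), and the same
   small integer value_id, so set membership tests can use a compact
   bitmap indexed by value_id instead of by SSA_NAME_VERSION.  */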
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, so they are simply represented as
   two bitmaps, one that keeps track of values present in the set, and
   one that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
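/* A sketch of the two-bitmap scheme (with hypothetical ids): if a set
   contains the expressions with expression-ids {3, 7}, and both
   expressions compute the same value with value-id 12, then

       set->expressions = { 3, 7 }
       set->values      = { 12 }

   Value-based queries (does the set contain this value?) test the
   values bitmap; enumeration walks the expressions bitmap.  */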
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d *value_type;
  typedef pre_expr_d *compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
/* Return the existing expression id for EXPR, or 0 if it has not been
   assigned one yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
        return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
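/* A minimal usage sketch for the iteration macros above (the set and
   the loop body are hypothetical):

       unsigned i;
       bitmap_iterator bi;
       FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
         {
           pre_expr expr = expression_for_id (i);
           ...
         }
*/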
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d *value_type;
  typedef expr_pred_trans_d *compare_type;
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
                          const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
                                             pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    value_expressions.safe_grow_cleared (v + 1);

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
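/* An illustrative sketch (hypothetical ids): after

       add_to_value (12, e);     e has expression-id 3
       add_to_value (12, f);     f has expression-id 7

   value_expressions[12] is the bitmap { 3, 7 }.  This reverse mapping
   from a value to all expressions computing it is what makes
   bitmap_find_leader and sorted_array_from_bitmap_set below cheap.  */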
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
        return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            result.quick_push (expression_for_id (j));
        }
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }

  gcc_unreachable ();
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             ref->operands.iterate (i, &vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != ref->operands.length () - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */

DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}
void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */

static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of
         them have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE which definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          unsigned int cnt;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
                                           NULL, NULL);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  return result;
}
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap exprs = value_expressions[value_id];
        EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
            else if (rep->kind == CONSTANT)
              return PRE_EXPR_CONSTANT (rep);
          }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                value_expressions.safe_grow_cleared (get_max_value_id () + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vec<vn_reference_op_s> operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        vec<vn_reference_op_s> newoperands = vNULL;
        bool changed = false, same_valid = true;
        unsigned int i, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0; operands.iterate (i, &operand); i++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (op[n] == NULL_TREE)
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (!opresult)
                  break;
                if (opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    changed |= name != op[n];
                    op[n] = name;
                  }
              }
            if (n != 3)
              {
                newoperands.release ();
                return NULL;
              }
            if (!newoperands.exists ())
              newoperands = operands.copy ();
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            newoperands[i] = newop;
          }
        gcc_checking_assert (i == operands.length ());

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands.exists ()
                                                    ? newoperands : operands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                newoperands.release ();
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands.exists ()
                                                      ? newoperands : operands,
                                                      &newref, VN_WALK);
            if (result)
              newoperands.release ();

            /* We can always insert constants, so if we have a partial
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                newoperands.release ();
                return NULL;
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    value_expressions.safe_grow_cleared
                      (get_max_value_id () + 1);
                  }
                else
                  new_val_id = ref->value_id;
                if (!newoperands.exists ())
                  newoperands = operands.copy ();
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = vNULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        newoperands.release ();
        return expr;
      }
      break;

    case NAME:
      {
        tree name = PRE_EXPR_NAME (expr);
        gimple def_stmt = SSA_NAME_DEF_STMT (name);
        /* If the SSA name is defined by a PHI node in this block,
           translate it.  */
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          {
            edge e = find_edge (pred, gimple_bb (def_stmt));
            tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            return get_or_alloc_expr_for_name (def);
          }
        /* Otherwise return it unchanged - it will get removed if its
           value is not available in PRED's AVAIL_OUT set of expressions
           by the subtraction of TMP_GEN.  */
        return expr;
      }

    default:
      gcc_unreachable ();
    }
}
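/* An illustrative phi translation (the SSA names and blocks below are
   hypothetical): with

       B2: x_4 = PHI <x_5(B0), x_7(B1)>
           ...
           tmp_9 = x_4 + 1;

   translating the NARY {plus_expr, x_4, 1} out of B2 through the PHI
   along the edge B1->B2 yields {plus_expr, x_7, 1}, the value the
   expression would have at the end of B1.  This is how the ANTIC sets
   flow backwards through PHI nodes.  */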
/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
        return slot->v;
      /* Store NULL for the value we want to return in the case of
         recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
        slot->v = phitrans;
      else
        /* Remove failed translations again, they cause insert
           iteration to not pick up new opportunities reliably.  */
        phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
        return expression_for_id (i);
    }
  return NULL;
}
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
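/* An illustrative example (hypothetical GIMPLE): in

       # VUSE <.MEM_3>
       t_5 = *p_1;       <- the memory expression {*p_1, .MEM_3}
       *q_2 = 0;         <- may clobber *p_1

   the expression {*p_1, .MEM_3} dies in this block because the store
   through q_2 may clobber it, so it cannot remain ANTIC_IN at the
   top of the block.  */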
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
            || (set2 && bitmap_set_contains_value (set2, value_id))))
        return false;
    }
  return true;
}
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
{
  switch (expr->kind)
    {
    case NAME:
      /* By construction all NAMEs are available.  Non-available
         NAMEs are removed by subtracting TMP_GEN from the sets.  */
      return true;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
          if (!op_valid_in_sets (set1, set2, nary->op[i]))
            return false;
        return true;
      }
      break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;
        unsigned int i;

        FOR_EACH_VEC_ELT (ref->operands, i, vro)
          {
            if (!op_valid_in_sets (set1, set2, vro->op0)
                || !op_valid_in_sets (set1, set2, vro->op1)
                || !op_valid_in_sets (set1, set2, vro->op2))
              return false;
          }
        return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
        bitmap_remove_from_set (set1, expr);
    }
  exprs.release ();
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set)
{
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set, NULL, expr))
        bitmap_remove_from_set (set, expr);
    }
  exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
        {
          vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
          if (ref->vuse)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
              if (!gimple_nop_p (def_stmt)
                  && ((gimple_bb (def_stmt) != block
                       && !dominated_by_p (CDI_DOMINATORS,
                                           block, gimple_bb (def_stmt)))
                      || (gimple_bb (def_stmt) == block
                          && value_dies_in_block_x (expr, block))))
                bitmap_remove_from_set (set, expr);
            }
        }
      else if (expr->kind == NARY)
        {
          vn_nary_op_t nary = PRE_EXPR_NARY (expr);
          /* If the NARY may trap make sure the block does not contain
             a possible exit point.
             ??? This is overly conservative if we translate AVAIL_OUT
             as the available expression might be after the exit point.  */
          if (BB_MAY_NOTRETURN (block)
              && vn_nary_may_trap (nary))
            bitmap_remove_from_set (set, expr);
        }
    }
}
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
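/* A small worked example of these equations (the blocks and the
   expression are hypothetical): for a diamond

          B1
         /  \
        B2    B3
         \  /
          B4        with a + b computed only in B4,

   ANTIC_IN[B4] contains the value of a + b (it is generated there), so
   ANTIC_OUT[B2] and ANTIC_OUT[B3] both contain it, and it flows up to
   ANTIC_IN[B1] as long as the leaders of a and b are defined above B1
   (otherwise clean () removes it past their definition points).  */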
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);
      gcc_assert (BB_VISITED (succ_bb));
      phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      size_t i;
      basic_block bprime, first = NULL;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (!first
              && BB_VISITED (e->dest))
            first = e->dest;
          else if (BB_VISITED (e->dest))
            worklist.quick_push (e->dest);
        }

      /* Of multiple successors we have to have visited one already
         which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);

      FOR_EACH_VEC_ELT (worklist, i, bprime)
        {
          if (!gimple_seq_empty_p (phi_nodes (bprime)))
            {
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
              bitmap_set_and (ANTIC_OUT, tmp);
              bitmap_set_free (tmp);
            }
          else
            bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
        }
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
                                          TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
                                  expression_for_id (bii));

  clean (ANTIC_IN (block));

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
        bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (ANTIC_OUT)
        print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

      print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
                        block->index);

      if (S)
        print_bitmap_set (dump_file, S, "S", block->index);
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
                                  - ANTIC_IN[BLOCK])
*/
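/* A sketch of why partial anticipation uses a union (hypothetical
   expression): if a + b is ANTIC_IN in only one of two successors, it
   is not fully anticipatable (it drops out of the intersection above),
   but it is *partially* anticipatable, so it lands in PA_OUT and may
   still justify an insertion later if it turns out to be fully
   available in all predecessors.  */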
2263 compute_partial_antic_aux (basic_block block
,
2264 bool block_has_abnormal_pred_edge
)
2266 bool changed
= false;
2267 bitmap_set_t old_PA_IN
;
2268 bitmap_set_t PA_OUT
;
2271 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2273 old_PA_IN
= PA_OUT
= NULL
;
2275 /* If any edges from predecessors are abnormal, antic_in is empty,
2277 if (block_has_abnormal_pred_edge
)
2278 goto maybe_dump_sets
;
2280 /* If there are too many partially anticipatable values in the
2281 block, phi_translate_set can take an exponential time: stop
2282 before the translation starts. */
2284 && single_succ_p (block
)
2285 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2286 goto maybe_dump_sets
;
2288 old_PA_IN
= PA_IN (block
);
2289 PA_OUT
= bitmap_set_new ();
  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      size_t i;
      basic_block bprime;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  worklist.quick_push (e->dest);
	}
      if (worklist.length () > 0)
	{
	  FOR_EACH_VEC_ELT (worklist, i, bprime)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
    }
  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block));

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      bitmap_set_bit (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
	{
	  if (bitmap_bit_p (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
	      changed |= compute_antic_aux (block,
					    bitmap_bit_p (has_abnormal_preds,
							  block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = postorder_num - 1 ; i >= 0; i--)
	    {
	      if (bitmap_bit_p (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  bitmap_bit_p (has_abnormal_preds,
								block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_checking_assert (num_iterations < 500);
	}
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
				  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;
/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts)
{
  vn_reference_op_t currop = &ref->operands[*operand];
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
	tree folded, sc = NULL_TREE;
	unsigned int nargs = 0;
	tree fn, *args;
	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
	  fn = currop->op0;
	else
	  {
	    fn = find_or_generate_expression (block, currop->op0, stmts);
	    if (!fn)
	      return NULL_TREE;
	  }
	if (currop->op1)
	  {
	    sc = find_or_generate_expression (block, currop->op1, stmts);
	    if (!sc)
	      return NULL_TREE;
	  }
	args = XNEWVEC (tree, ref->operands.length () - 1);
	while (*operand < ref->operands.length ())
	  {
	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
							    operand, stmts);
	    if (!args[nargs])
	      return NULL_TREE;
	    nargs++;
	  }
	folded = build_call_array (currop->type,
				   (TREE_CODE (fn) == FUNCTION_DECL
				    ? build_fold_addr_expr (fn) : fn),
				   nargs, args);
	if (currop->with_bounds)
	  CALL_WITH_BOUNDS_P (folded) = true;
	free (args);
	if (sc)
	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
	return folded;
      }

    case MEM_REF:
      {
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	tree offset = currop->op0;
	if (TREE_CODE (baseop) == ADDR_EXPR
	    && handled_component_p (TREE_OPERAND (baseop, 0)))
	  {
	    HOST_WIDE_INT off;
	    tree base;
	    base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
						  &off);
	    gcc_assert (base);
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     off));
	    baseop = build_fold_addr_expr (base);
	  }
	tree t = fold_build2 (MEM_REF, currop->type, baseop, offset);
	REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
	return t;
      }

    case TARGET_MEM_REF:
      {
	tree genop0 = NULL_TREE, genop1 = NULL_TREE;
	vn_reference_op_t nextop = &ref->operands[++*operand];
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!baseop)
	  return NULL_TREE;
	if (currop->op0)
	  {
	    genop0 = find_or_generate_expression (block, currop->op0, stmts);
	    if (!genop0)
	      return NULL_TREE;
	  }
	if (nextop->op0)
	  {
	    genop1 = find_or_generate_expression (block, nextop->op0, stmts);
	    if (!genop1)
	      return NULL_TREE;
	  }
	return build5 (TARGET_MEM_REF, currop->type,
		       baseop, currop->op2, genop0, currop->op1, genop1);
      }

    case ADDR_EXPR:
      if (currop->op0)
	{
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	return fold_build1 (currop->opcode, currop->type, genop0);
      }

    case WITH_SIZE_EXPR:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
	if (!genop1)
	  return NULL_TREE;
	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
      }

    case BIT_FIELD_REF:
      {
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts);
	if (!genop0)
	  return NULL_TREE;
	tree op1 = currop->op0;
	tree op2 = currop->op1;
	tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
	REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
	return t;
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	tree genop2 = currop->op1;
	tree genop3 = currop->op2;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts);
	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, genop1, stmts);
	if (!genop1)
	  return NULL_TREE;
	if (genop2)
	  {
	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
	    /* Drop zero minimum index if redundant.  */
	    if (integer_zerop (genop2)
		&& (!domain_type
		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
	      genop2 = NULL_TREE;
	    else
	      {
		genop2 = find_or_generate_expression (block, genop2, stmts);
		if (!genop2)
		  return NULL_TREE;
	      }
	  }
	if (genop3)
	  {
	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
	    /* We can't always put a size in units of the element alignment
	       here as the element alignment may be not visible.  See
	       PR43783.  Simply drop the element size for constant
	       sizes.  */
	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
	      genop3 = NULL_TREE;
	    else
	      {
		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
		genop3 = find_or_generate_expression (block, genop3, stmts);
		if (!genop3)
		  return NULL_TREE;
	      }
	  }
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }
    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    genop2 = find_or_generate_expression (block, genop2, stmts);
	    if (!genop2)
	      return NULL_TREE;
	  }
	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
      }

    case SSA_NAME:
      {
	genop = find_or_generate_expression (block, currop->op0, stmts);
	return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}

/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
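
/* Illustrative example (added; hypothetical operand layout): for a
   reference like a.b[i], REF->operands holds the access from outermost
   to innermost piece, roughly
       { ARRAY_REF <op0 = i>, COMPONENT_REF <op0 = b>, VAR_DECL <a> }
   and create_component_ref_by_pieces_1 recurses inward, generating SSA
   leaders only for the scalar pieces such as the index i, so the
   aggregate access itself is rebuilt as one tree.  */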
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
	return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs which eventually re-materializes
	 its operand values.  */
      if (temp->kind == NARY)
	return create_expression_by_pieces (block, temp, stmts,
					    get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}

#define NECESSARY GF_PLF_1
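
/* Note (added): NECESSARY is a statement propagation flag (GF_PLF_1).
   Statements created by the insertion code below start with NECESSARY
   cleared; insertions that never become necessary are assumed to be
   cleaned up by the quick dead code elimination run over inserted_exprs
   at the end of the pass.  */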
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gassign *newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	folded = create_component_ref_by_pieces (block, ref, stmts);
	if (!folded)
	  return NULL_TREE;
      }
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	tree *genop = XALLOCAVEC (tree, nary->length);
	unsigned i;
	for (i = 0; i < nary->length; ++i)
	  {
	    genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
	    if (!genop[i])
	      return NULL_TREE;
	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
	       may have conversions stripped.  */
	    if (nary->opcode == POINTER_PLUS_EXPR)
	      {
		if (i == 0)
		  genop[i] = gimple_convert (&forced_stmts,
					     nary->type, genop[i]);
		else if (i == 1)
		  genop[i] = gimple_convert (&forced_stmts,
					     sizetype, genop[i]);
	      }
	    else
	      genop[i] = gimple_convert (&forced_stmts,
					 TREE_TYPE (nary->op[i]), genop[i]);
	  }
	if (nary->opcode == CONSTRUCTOR)
	  {
	    vec<constructor_elt, va_gc> *elts = NULL;
	    for (i = 0; i < nary->length; ++i)
	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
	    folded = build_constructor (nary->type, elts);
	  }
	else
	  {
	    switch (nary->length)
	      {
	      case 1:
		folded = fold_build1 (nary->opcode, nary->type,
				      genop[0]);
		break;
	      case 2:
		folded = fold_build2 (nary->opcode, nary->type,
				      genop[0], genop[1]);
		break;
	      case 3:
		folded = fold_build3 (nary->opcode, nary->type,
				      genop[0], genop[1], genop[2]);
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
    folded = fold_convert (exprtype, folded);

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  gimple_seq tem = NULL;
  folded = force_gimple_operand (unshare_expr (folded), &tem,
				 false, NULL);
  gimple_seq_add_seq_without_update (&forced_stmts, tem);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  if (TREE_CODE (forcedname) == SSA_NAME)
	    {
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }

	  gimple_set_vuse (stmt, BB_LIVE_VOP_ON_EXIT (block));
	  gimple_set_modified (stmt, true);
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = make_temp_ssa_name (exprtype, NULL, "pretmp");
  newstmt = gimple_build_assign (name, folded);
  gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
  gimple_set_modified (newstmt, true);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));

  /* Fold the last statement.  */
  gsi = gsi_last (*stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (gsi_stmt (gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
  if (VN_INFO (name)->valnum == NULL_TREE)
    VN_INFO (name)->valnum = name;
  gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d (%04d)\n",
	       block->index, value_id);
    }

  return name;
}
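
/* Worked example for create_expression_by_pieces (added sketch): asked
   to generate the value of (a + b) * c, the NARY case finds or
   recursively generates leaders for a + b and for c, then emits
       pretmp_1 = leader_ab * leader_c;
   into STMTS, gives pretmp_1 the value-id of the whole expression and
   registers it in NEW_SETS/AVAIL_OUT so dominated blocks can reuse it.  */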
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gphi *phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && expr->kind != REFERENCE)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial "
		     "redundancy: Looks like an induction variable\n");
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
	{
	  builtexpr = create_expression_by_pieces (bprime, eprime,
						   &stmts, type);
	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
	  gsi_insert_seq_on_edge (pred, stmts);
	  if (!builtexpr)
	    {
	      /* We cannot insert a PHI node if we failed to insert
		 the expression.  */
	      nophi = true;
	      continue;
	    }
	  avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
	  insertions = true;
	}
      else if (eprime->kind == CONSTANT)
	{
	  /* Constants may not have the right type, fold_convert
	     should give us back a constant with the right type.  */
	  tree constant = PRE_EXPR_CONSTANT (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
	    {
	      tree builtexpr = fold_convert (type, constant);
	      if (!is_gimple_min_invariant (builtexpr))
		{
		  tree forcedexpr = force_gimple_operand (builtexpr,
							  &stmts, false,
							  NULL);
		  if (!is_gimple_min_invariant (forcedexpr))
		    {
		      if (forcedexpr != builtexpr)
			{
			  VN_INFO_GET (forcedexpr)->valnum
			    = PRE_EXPR_CONSTANT (eprime);
			  VN_INFO (forcedexpr)->value_id
			    = get_expr_value_id (eprime);
			}
		      if (stmts)
			{
			  gimple_stmt_iterator gsi;
			  gsi = gsi_start (stmts);
			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
			    {
			      gimple stmt = gsi_stmt (gsi);
			      tree lhs = gimple_get_lhs (stmt);
			      if (TREE_CODE (lhs) == SSA_NAME)
				bitmap_set_bit (inserted_exprs,
						SSA_NAME_VERSION (lhs));
			      gimple_set_plf (stmt, NECESSARY, false);
			    }
			  gsi_insert_seq_on_edge (pred, stmts);
			}
		      avail[pred->dest_idx]
			= get_or_alloc_expr_for_name (forcedexpr);
		    }
		}
	      else
		avail[pred->dest_idx]
		  = get_or_alloc_expr_for_constant (builtexpr);
	    }
	}
      else if (eprime->kind == NAME)
	{
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
	  tree name = PRE_EXPR_NAME (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
	    {
	      tree builtexpr;
	      tree forcedexpr;
	      builtexpr = fold_convert (type, name);
	      forcedexpr = force_gimple_operand (builtexpr,
						 &stmts, false,
						 NULL);

	      if (forcedexpr != name)
		{
		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
		}

	      if (stmts)
		{
		  gimple_stmt_iterator gsi;
		  gsi = gsi_start (stmts);
		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
		    {
		      gimple stmt = gsi_stmt (gsi);
		      tree lhs = gimple_get_lhs (stmt);
		      if (TREE_CODE (lhs) == SSA_NAME)
			bitmap_set_bit (inserted_exprs,
					SSA_NAME_VERSION (lhs));
		      gimple_set_plf (stmt, NECESSARY, false);
		    }
		  gsi_insert_seq_on_edge (pred, stmts);
		}
	      avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
	    }
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, "prephitmp");
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  VN_INFO_GET (temp)->value_id = val;
  VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
  if (VN_INFO (temp)->valnum == NULL_TREE)
    VN_INFO (temp)->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
		     pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (temp);
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  /* If we insert a PHI node for a conversion of another PHI node
     in the same basic-block try to preserve range information.
     This is important so that followup loop passes receive optimal
     number of iteration analysis results.  See PR61743.  */
  if (expr->kind == NARY
      && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
      && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
      && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
      && INTEGRAL_TYPE_P (type)
      && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
      && (TYPE_PRECISION (type)
	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
      && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
    {
      wide_int min, max;
      if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
	  && !wi::neg_p (min, SIGNED)
	  && !wi::neg_p (max, SIGNED))
	/* Just handle extension and sign-changes of all-positive ranges.  */
	set_range_info (temp,
			SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
			wide_int_storage::from (min, TYPE_PRECISION (type),
						TYPE_SIGN (type)),
			wide_int_storage::from (max, TYPE_PRECISION (type),
						TYPE_SIGN (type)));
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
    }
  pre_stats.phis++;
  return true;
}
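
/* Illustrative CFG example (added): if block B has predecessors P1 and
   P2 and a + b is available only out of P1, the code above generates a
   computation of a + b on the P2 edge and merges the two with
       prephitmp_3 = PHI <t_1 (P1), t_2 (P2)>
   so the expression becomes fully redundant in B.  */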
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
   2a. Iterate over the ANTIC expressions for the block to see if
       any of them are partially redundant.
   2b. If so, insert them into the necessary predecessors to make
       the expression fully redundant.
   2c. Insert a new PHI merging the values of the predecessors.
   2d. Insert the new PHI, and the new expressions, into the
       NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_insertion.  */
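
/* Example of why insert iterates (added note): materializing a + b in a
   predecessor can itself make another expression, say (a + b) * c,
   partially redundant where it was not before, so insert_aux is re-run
   until an iteration adds nothing new.  */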
static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Found fully redundant value: ");
		  print_pre_expr (dump_file, expr);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime);
	      if (edoubleprime == NULL)
		{
		  avail[pred->dest_idx] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[pred->dest_idx] = edoubleprime;
		  by_some = true;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_d::equal (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some)
	    {
	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), no redundancy on to be "
			       "optimized for speed edge\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial redundancy for "
			       "expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same)
	    {
	      gcc_assert (edoubleprime->kind == CONSTANT
			  || edoubleprime->kind == NAME);

	      tree temp = make_temp_ssa_name (get_expr_type (expr),
					      NULL, "pretmp");
	      gassign *assign
		= gimple_build_assign (temp,
				       edoubleprime->kind == CONSTANT ?
				       PRE_EXPR_CONSTANT (edoubleprime) :
				       PRE_EXPR_NAME (edoubleprime));
	      gimple_stmt_iterator gsi = gsi_after_labels (block);
	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

	      gimple_set_plf (assign, NECESSARY, false);
	      VN_INFO_GET (temp)->value_id = val;
	      VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
	      if (VN_INFO (temp)->valnum == NULL_TREE)
		VN_INFO (temp)->valnum = temp;
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
	      pre_expr newe = get_or_alloc_expr_for_name (temp);
	      add_to_value (val, newe);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
	      bitmap_insert_into_set (NEW_SETS (block), newe);
	    }
	}
    }

  exprs.release ();
  return new_stuff;
}
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  vec<pre_expr> exprs;
  pre_expr expr;
  auto_vec<pre_expr> avail;
  int i;

  exprs = sorted_array_from_bitmap_set (PA_IN (block));
  avail.safe_grow (EDGE_COUNT (block->preds));

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
	      avail[pred->dest_idx] = edoubleprime;
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all)
	    {
	      edge succ;
	      bool do_insertion = false;

	      /* Insert only if we can remove a later expression on a path
		 that we want to optimize for speed.
		 The phi node that we will be inserting in BLOCK is not free,
		 and inserting it for the sake of !optimize_for_speed successor
		 may cause regressions on the speed path.  */
	      FOR_EACH_EDGE (succ, ei, block->succs)
		{
		  if (bitmap_set_contains_value (PA_IN (succ->dest), val)
		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
		    {
		      if (optimize_edge_for_speed_p (succ))
			do_insertion = true;
		    }
		}

	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Skipping partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d), not (partially) anticipated "
			       "on any to be optimized for speed edges\n", val);
		    }
		}
	      else if (dbg_cnt (treepre_insert))
		{
		  pre_stats.pa_insert++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Found partial partial redundancy "
			       "for expression ");
		      print_pre_expr (dump_file, expr);
		      fprintf (dump_file, " (%04d)\n",
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	}
    }

  exprs.release ();
  return new_stuff;
}
static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT.  For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }
	  if (!single_pred_p (block))
	    {
	      new_stuff |= do_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_partial_partial_insertion (block, dom);
	    }
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son);
    }

  return new_stuff;
}
/* Perform insertion of partially redundant values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB_FN (bb, cfun)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
	fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
      new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));

      /* Clear the NEW sets before the next iteration.  We have already
         fully propagated its contents.  */
      if (new_stuff)
	FOR_ALL_BB_FN (bb, cfun)
	  bitmap_set_free (NEW_SETS (bb));
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
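
/* Illustrative instance of the equations above (added): because AVAIL
   is propagated along the dominator tree, a value computed in a block
   is available in every block it dominates.  With entry -> B1 -> B2 and
   x_1 = a + b in B1, x_1 enters TMP_GEN[B1] and hence AVAIL_OUT[B1] and
   AVAIL_OUT[B2], with no fixpoint iteration needed.  */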
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
	  || !SSA_NAME_IS_DEFAULT_DEF (name)
	  || has_zero_uses (name)
	  || virtual_operand_p (name))
	continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    e);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			"tmp_gen", ENTRY_BLOCK);
      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			"avail_out", ENTRY_BLOCK);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
    = ssa_default_def (cfun, gimple_vop (cfun));

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
	  BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
	}

      /* Generate values for PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  tree result = gimple_phi_result (gsi.phi ());

	  /* We have no need for virtual phis, as they don't represent
	     actual computations.  */
	  if (virtual_operand_p (result))
	    {
	      BB_LIVE_VOP_ON_EXIT (block) = result;
	      continue;
	    }

	  pre_expr e = get_or_alloc_expr_for_name (result);
	  add_to_value (get_expr_value_id (e), e);
	  bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	  bitmap_insert_into_set (PHI_GEN (block), e);
	}

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  tree op;

	  stmt = gsi_stmt (gsi);

	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
	    {
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbid hoisting possibly trapping expressions
		 before it.  */
	      int flags = gimple_call_flags (stmt);
	      if (!(flags & ECF_CONST)
		  || (flags & ECF_LOOPING_CONST_OR_PURE))
		BB_MAY_NOTRETURN (block) = 1;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (op);

	      add_to_value (get_expr_value_id (e), e);
	      bitmap_insert_into_set (TMP_GEN (block), e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	    }

	  if (gimple_vdef (stmt))
	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);

	  if (gimple_has_side_effects (stmt)
	      || stmt_could_throw_p (stmt)
	      || is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      if (ssa_undefined_value_p (op))
		continue;
	      pre_expr e = get_or_alloc_expr_for_name (op);
	      bitmap_value_insert_into_set (EXP_GEN (block), e);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      continue;

	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		vn_reference_s ref1;
		pre_expr result = NULL;

		/* We can value number only calls to real functions.  */
		if (gimple_call_internal_p (stmt))
		  continue;

		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
		if (!ref)
		  continue;

		/* If the value of the call is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
		if (!gimple_vuse (stmt)
		    || gimple_code
			 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
		    || gimple_bb (SSA_NAME_DEF_STMT
				    (gimple_vuse (stmt))) != block)
		  {
		    result = (pre_expr) pool_alloc (pre_expr_pool);
		    result->kind = REFERENCE;
		    result->id = 0;
		    PRE_EXPR_REFERENCE (result) = ref;

		    get_or_alloc_expression_id (result);
		    add_to_value (get_expr_value_id (result), result);
		    bitmap_value_insert_into_set (EXP_GEN (block), result);
		  }
		continue;
	      }

	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (vn_get_stmt_kind (stmt))
		  {
		  case VN_NARY:
		    {
		      enum tree_code code = gimple_assign_rhs_code (stmt);
		      vn_nary_op_t nary;

		      /* COND_EXPR and VEC_COND_EXPR are awkward in
			 that they contain an embedded complex expression.
			 Don't even try to shove those through PRE.  */
		      if (code == COND_EXPR
			  || code == VEC_COND_EXPR)
			continue;

		      vn_nary_op_lookup_stmt (stmt, &nary);
		      if (!nary)
			continue;

		      /* If the NARY traps and there was a preceding
		         point in the block that might not return avoid
			 adding the nary to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_nary_may_trap (nary))
			continue;

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = NARY;
		      result->id = 0;
		      PRE_EXPR_NARY (result) = nary;
		      break;
		    }

		  case VN_REFERENCE:
		    {
		      vn_reference_t ref;
		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
					   gimple_vuse (stmt),
					   VN_WALK, &ref);
		      if (!ref)
			continue;

		      /* If the value of the reference is not invalidated in
			 this block until it is computed, add the expression
			 to EXP_GEN.  */
		      if (gimple_vuse (stmt))
			{
			  gimple def_stmt;
			  bool ok = true;
			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
			  while (!gimple_nop_p (def_stmt)
				 && gimple_code (def_stmt) != GIMPLE_PHI
				 && gimple_bb (def_stmt) == block)
			    {
			      if (stmt_may_clobber_ref_p
				    (def_stmt, gimple_assign_rhs1 (stmt)))
				{
				  ok = false;
				  break;
				}
			      def_stmt
				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
			    }
			  if (!ok)
			    continue;
			}

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = REFERENCE;
		      result->id = 0;
		      PRE_EXPR_REFERENCE (result) = ref;
		      break;
		    }

		  default:
		    continue;
		  }

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		bitmap_value_insert_into_set (EXP_GEN (block), result);
		continue;
	      }
	    default:
	      break;
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_bitmap_set (dump_file, EXP_GEN (block),
			    "exp_gen", block->index);
	  print_bitmap_set (dump_file, PHI_GEN (block),
			    "phi_gen", block->index);
	  print_bitmap_set (dump_file, TMP_GEN (block),
			    "tmp_gen", block->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (block),
			    "avail_out", block->index);
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static vec<gimple> el_to_fixup;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
	return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (el_avail[SSA_NAME_VERSION (valnum)])
	pushop = el_avail[SSA_NAME_VERSION (valnum)];
      el_avail_stack.safe_push (pushop);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gassign *tem = gimple_build_assign (res,
				      fold_build1 (TREE_CODE (expr),
						   TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}
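
/* Illustrative example (added): if SCCVN recorded that val _5 is really
   (int) x_2 but no such computation dominates the current use,
   eliminate_insert emits
       pretmp_6 = (int) <leader of x_2>;
   before *GSI and returns pretmp_6 as the new leader for the value.  */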
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction direction, bool do_pre_)
      : dom_walker (direction), do_pre (do_pre_) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  bool do_pre;
};
4020 /* Perform elimination for the basic-block B during the domwalk. */
4023 eliminate_dom_walker::before_dom_children (basic_block b
)
4026 el_avail_stack
.safe_push (NULL_TREE
);
4028 /* ??? If we do nothing for unreachable blocks then this will confuse
4029 tailmerging. Eventually we can reduce its reliance on SCCVN now
4030 that we fully copy/constant-propagate (most) things. */
4032 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4034 gphi
*phi
= gsi
.phi ();
4035 tree res
= PHI_RESULT (phi
);
4037 if (virtual_operand_p (res
))
4043 tree sprime
= eliminate_avail (res
);
4047 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4049 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4050 print_generic_expr (dump_file
, res
, 0);
4051 fprintf (dump_file
, " with ");
4052 print_generic_expr (dump_file
, sprime
, 0);
4053 fprintf (dump_file
, "\n");
4056 /* If we inserted this PHI node ourself, it's not an elimination. */
4058 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4061 pre_stats
.eliminations
++;
4063 /* If we will propagate into all uses don't bother to do
4065 if (may_propagate_copy (res
, sprime
))
4067 /* Mark the PHI for removal. */
4068 el_to_remove
.safe_push (phi
);
4073 remove_phi_node (&gsi
, false);
4076 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4077 && TREE_CODE (sprime
) == SSA_NAME
)
4078 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4080 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4081 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4082 gimple stmt
= gimple_build_assign (res
, sprime
);
4083 /* ??? It cannot yet be necessary (DOM walk). */
4084 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4086 gimple_stmt_iterator gsi2
= gsi_after_labels (b
);
4087 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4091 eliminate_push_avail (res
);
4095 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
);
4099 tree sprime
= NULL_TREE
;
4100 gimple stmt
= gsi_stmt (gsi
);
4101 tree lhs
= gimple_get_lhs (stmt
);
4102 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
4103 && !gimple_has_volatile_ops (stmt
)
4104 /* See PR43491. Do not replace a global register variable when
4105 it is a the RHS of an assignment. Do replace local register
4106 variables since gcc does not guarantee a local variable will
4107 be allocated in register.
4108 ??? The fix isn't effective here. This should instead
4109 be ensured by not value-numbering them the same but treating
4110 them like volatiles? */
4111 && !(gimple_assign_single_p (stmt
)
4112 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == VAR_DECL
4113 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
))
4114 && is_global_var (gimple_assign_rhs1 (stmt
)))))
4116 sprime
= eliminate_avail (lhs
);
4119 /* If there is no existing usable leader but SCCVN thinks
4120 it has an expression it wants to use as replacement,
4122 tree val
= VN_INFO (lhs
)->valnum
;
4124 && TREE_CODE (val
) == SSA_NAME
4125 && VN_INFO (val
)->needs_insertion
4126 && VN_INFO (val
)->expr
!= NULL_TREE
4127 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4128 eliminate_push_avail (sprime
);
4131 /* If this now constitutes a copy duplicate points-to
4132 and range info appropriately. This is especially
4133 important for inserted code. See tree-ssa-copy.c
4134 for similar code. */
4136 && TREE_CODE (sprime
) == SSA_NAME
)
4138 basic_block sprime_b
= gimple_bb (SSA_NAME_DEF_STMT (sprime
));
4139 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
4140 && SSA_NAME_PTR_INFO (lhs
)
4141 && !SSA_NAME_PTR_INFO (sprime
))
4143 duplicate_ssa_name_ptr_info (sprime
,
4144 SSA_NAME_PTR_INFO (lhs
));
4146 mark_ptr_info_alignment_unknown
4147 (SSA_NAME_PTR_INFO (sprime
));
4149 else if (!POINTER_TYPE_P (TREE_TYPE (lhs
))
4150 && SSA_NAME_RANGE_INFO (lhs
)
4151 && !SSA_NAME_RANGE_INFO (sprime
)
4153 duplicate_ssa_name_range_info (sprime
,
4154 SSA_NAME_RANGE_TYPE (lhs
),
4155 SSA_NAME_RANGE_INFO (lhs
));
4158 /* Inhibit the use of an inserted PHI on a loop header when
4159 the address of the memory reference is a simple induction
4160 variable. In other cases the vectorizer won't do anything
4161 anyway (either it's loop invariant or a complicated
4164 && TREE_CODE (sprime
) == SSA_NAME
4166 && flag_tree_loop_vectorize
4167 && loop_outer (b
->loop_father
)
4168 && has_zero_uses (sprime
)
4169 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))
4170 && gimple_assign_load_p (stmt
))
4172 gimple def_stmt
= SSA_NAME_DEF_STMT (sprime
);
4173 basic_block def_bb
= gimple_bb (def_stmt
);
4174 if (gimple_code (def_stmt
) == GIMPLE_PHI
4175 && b
->loop_father
->header
== def_bb
)
4180 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
4183 def_bb
= gimple_bb (SSA_NAME_DEF_STMT (op
));
4185 && flow_bb_inside_loop_p (b
->loop_father
, def_bb
)
4186 && simple_iv (b
->loop_father
,
4187 b
->loop_father
, op
, &iv
, true))
4195 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4197 fprintf (dump_file
, "Not replacing ");
4198 print_gimple_expr (dump_file
, stmt
, 0, 0);
4199 fprintf (dump_file
, " with ");
4200 print_generic_expr (dump_file
, sprime
, 0);
4201 fprintf (dump_file
, " which would add a loop"
4202 " carried dependence to loop %d\n",
4203 b
->loop_father
->num
);
4205 /* Don't keep sprime available. */
4213 /* If we can propagate the value computed for LHS into
4214 all uses don't bother doing anything with this stmt. */
4215 if (may_propagate_copy (lhs
, sprime
))
4217 /* Mark it for removal. */
4218 el_to_remove
.safe_push (stmt
);
4220 /* ??? Don't count copy/constant propagations. */
4221 if (gimple_assign_single_p (stmt
)
4222 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4223 || gimple_assign_rhs1 (stmt
) == sprime
))
4226 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4228 fprintf (dump_file
, "Replaced ");
4229 print_gimple_expr (dump_file
, stmt
, 0, 0);
4230 fprintf (dump_file
, " with ");
4231 print_generic_expr (dump_file
, sprime
, 0);
4232 fprintf (dump_file
, " in all uses of ");
4233 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4236 pre_stats
.eliminations
++;
4240 /* If this is an assignment from our leader (which
4241 happens in the case the value-number is a constant)
4242 then there is nothing to do. */
4243 if (gimple_assign_single_p (stmt
)
4244 && sprime
== gimple_assign_rhs1 (stmt
))
4247 /* Else replace its RHS. */
4248 bool can_make_abnormal_goto
4249 = is_gimple_call (stmt
)
4250 && stmt_can_make_abnormal_goto (stmt
);
4252 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4254 fprintf (dump_file
, "Replaced ");
4255 print_gimple_expr (dump_file
, stmt
, 0, 0);
4256 fprintf (dump_file
, " with ");
4257 print_generic_expr (dump_file
, sprime
, 0);
4258 fprintf (dump_file
, " in ");
4259 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4262 if (TREE_CODE (sprime
) == SSA_NAME
)
4263 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4266 pre_stats
.eliminations
++;
4267 gimple orig_stmt
= stmt
;
4268 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4269 TREE_TYPE (sprime
)))
4270 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4271 tree vdef
= gimple_vdef (stmt
);
4272 tree vuse
= gimple_vuse (stmt
);
4273 propagate_tree_value_into_stmt (&gsi
, sprime
);
4274 stmt
= gsi_stmt (gsi
);
4276 if (vdef
!= gimple_vdef (stmt
))
4277 VN_INFO (vdef
)->valnum
= vuse
;
4279 /* If we removed EH side-effects from the statement, clean
4280 its EH information. */
4281 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4283 bitmap_set_bit (need_eh_cleanup
,
4284 gimple_bb (stmt
)->index
);
4285 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4286 fprintf (dump_file
, " Removed EH side-effects.\n");
4289 /* Likewise for AB side-effects. */
4290 if (can_make_abnormal_goto
4291 && !stmt_can_make_abnormal_goto (stmt
))
4293 bitmap_set_bit (need_ab_cleanup
,
4294 gimple_bb (stmt
)->index
);
4295 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4296 fprintf (dump_file
, " Removed AB side-effects.\n");
4303 /* If the statement is a scalar store, see if the expression
4304 has the same value number as its rhs. If so, the store is
4306 if (gimple_assign_single_p (stmt
)
4307 && !gimple_has_volatile_ops (stmt
)
4308 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4309 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4310 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))))
4313 tree rhs
= gimple_assign_rhs1 (stmt
);
4314 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4315 gimple_vuse (stmt
), VN_WALK
, NULL
);
4316 if (TREE_CODE (rhs
) == SSA_NAME
)
4317 rhs
= VN_INFO (rhs
)->valnum
;
4319 && operand_equal_p (val
, rhs
, 0))
4321 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4323 fprintf (dump_file
, "Deleted redundant store ");
4324 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4327 /* Queue stmt for removal. */
4328 el_to_remove
.safe_push (stmt
);
4333 bool can_make_abnormal_goto
= stmt_can_make_abnormal_goto (stmt
);
4334 bool was_noreturn
= (is_gimple_call (stmt
)
4335 && gimple_call_noreturn_p (stmt
));
4336 tree vdef
= gimple_vdef (stmt
);
4337 tree vuse
= gimple_vuse (stmt
);
4339 /* If we didn't replace the whole stmt (or propagate the result
4340 into all uses), replace all uses on this stmt with their
4342 use_operand_p use_p
;
4344 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4346 tree use
= USE_FROM_PTR (use_p
);
4347 /* ??? The call code above leaves stmt operands un-updated. */
4348 if (TREE_CODE (use
) != SSA_NAME
)
4350 tree sprime
= eliminate_avail (use
);
4351 if (sprime
&& sprime
!= use
4352 && may_propagate_copy (use
, sprime
)
4353 /* We substitute into debug stmts to avoid excessive
4354 debug temporaries created by removed stmts, but we need
4355 to avoid doing so for inserted sprimes as we never want
4356 to create debug temporaries for them. */
4358 || TREE_CODE (sprime
) != SSA_NAME
4359 || !is_gimple_debug (stmt
)
4360 || !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))))
4362 propagate_value (use_p
, sprime
);
4363 gimple_set_modified (stmt
, true);
4364 if (TREE_CODE (sprime
) == SSA_NAME
4365 && !is_gimple_debug (stmt
))
4366 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4371 /* Visit indirect calls and turn them into direct calls if
4372 possible using the devirtualization machinery. */
4373 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
4375 tree fn
= gimple_call_fn (call_stmt
);
4377 && flag_devirtualize
4378 && virtual_method_call_p (fn
))
4380 tree otr_type
= obj_type_ref_class (fn
);
4382 ipa_polymorphic_call_context
context (current_function_decl
, fn
, stmt
, &instance
);
4385 context
.get_dynamic_type (instance
, OBJ_TYPE_REF_OBJECT (fn
), otr_type
, stmt
);
4387 vec
<cgraph_node
*>targets
4388 = possible_polymorphic_call_targets (obj_type_ref_class (fn
),
4390 (OBJ_TYPE_REF_TOKEN (fn
)),
4393 if (dump_enabled_p ())
4394 dump_possible_polymorphic_call_targets (dump_file
,
4395 obj_type_ref_class (fn
),
4397 (OBJ_TYPE_REF_TOKEN (fn
)),
4399 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4402 if (targets
.length () == 1)
4403 fn
= targets
[0]->decl
;
4405 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4406 if (dump_enabled_p ())
4408 location_t loc
= gimple_location_safe (stmt
);
4409 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
4410 "converting indirect call to "
4412 cgraph_node::get (fn
)->name ());
4414 gimple_call_set_fndecl (call_stmt
, fn
);
4415 maybe_remove_unused_call_args (cfun
, call_stmt
);
4416 gimple_set_modified (stmt
, true);
      if (gimple_modified_p (stmt))
	{
	  /* If a formerly non-invariant ADDR_EXPR is turned into an
	     invariant one it was on a separate stmt.  */
	  if (gimple_assign_single_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
	  gimple old_stmt = stmt;
	  if (is_gimple_call (stmt))
	    {
	      /* ???  Only fold calls inplace for now, this may create new
		 SSA names which in turn will confuse free_scc_vn SSA name
		 release code.  */
	      fold_stmt_inplace (&gsi);
	      /* When changing a call into a noreturn call, cfg cleanup
		 is needed to fix up the noreturn call.  */
	      if (!was_noreturn && gimple_call_noreturn_p (stmt))
		el_to_fixup.safe_push (stmt);
	    }
	  else
	    {
	      fold_stmt (&gsi);
	      stmt = gsi_stmt (gsi);
	      if ((gimple_code (stmt) == GIMPLE_COND
		   && (gimple_cond_true_p (as_a <gcond *> (stmt))
		       || gimple_cond_false_p (as_a <gcond *> (stmt))))
		  || (gimple_code (stmt) == GIMPLE_SWITCH
		      && TREE_CODE (gimple_switch_index (
				      as_a <gswitch *> (stmt)))
			 == INTEGER_CST))
		el_todo |= TODO_cleanup_cfg;
	    }
	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }
	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }
	  update_stmt (stmt);
	  if (vdef != gimple_vdef (stmt))
	    VN_INFO (vdef)->valnum = vuse;
	}

      /* Make new values available - for fully redundant LHS we
	 continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	eliminate_push_avail (DEF_FROM_PTR (defp));
    }
  /* Replace destination PHI arguments.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    {
	      propagate_value (use_p, sprime);
	      if (TREE_CODE (sprime) == SSA_NAME)
		gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
	    }
	}
    }
}
/* Make no longer available leaders no longer available.  */
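/* A sketch of the scheme, as the code below suggests: entering a block
   pushes a NULL_TREE marker and then, for each value made available,
   either the shadowed old leader or the new entry itself onto
   el_avail_stack.  Popping back to the marker here either clears a slot
   this subtree introduced or restores the leader it shadowed, much like
   unwinding a scoped hash table.  */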
void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Eliminate fully redundant computations.  */
static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_fixup.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
			do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();
  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
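  /* Reverse order pops dominated stmts before the definitions feeding
     them, so by the time a definition is removed any debug temporaries
     built from its RHS still refer to live SSA names; this note is an
     illustration of the comment above, not a guarantee.  */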
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
	lhs = gimple_phi_result (stmt);
      else
	lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
	  && TREE_CODE (lhs) == SSA_NAME)
	bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();
  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el_to_fixup.is_empty ())
    {
      stmt = el_to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }
  el_to_fixup.release ();

  return el_todo;
}
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
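/* An illustrative, made-up case: if insert () materialized
       pretmp_8 = x_1 + y_2;
   for a partially redundant expression but elimination then rewrote
   every use of pretmp_8 to another leader, nothing ever marks the
   defining stmt NECESSARY and the walk below deletes it again.  */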
static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
	bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	    }
	}
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  BITMAP_FREE (worklist);
}
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table <expr_pred_trans_d> (5110);
  expression_to_id = new hash_table <pre_expr_d> (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Eliminate folds statements which might (though it should not) end up
     not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}