/* Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
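
/* As a concrete illustration of the above (a sketch only; the function,
   variable and temporary names below are hypothetical and not produced by
   this file), consider:

     int f (int a, int b, int cond)
     {
       int x = 0;
       if (cond)
         x = a + b;
       int y = a + b;
       return x + y;
     }

   The second computation of a + b is partially redundant: it is AVAIL on
   the edge leaving the then-branch but not on the other edge, and it is
   ANTIC in both predecessors of the join.  Insertion places the expression
   on the edge where it was missing, and elimination then turns the now
   fully redundant computation into a copy of the leader:

     int f (int a, int b, int cond)
     {
       int x = 0, pretmp;
       if (cond)
         pretmp = x = a + b;
       else
         pretmp = a + b;
       int y = pretmp;
       return x + y;
     }  */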
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, so they are simply represented as
   two bitmaps: one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
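
/* A minimal stand-alone sketch of the two-bitmap idea above (this is not
   the bitmap_set_t defined below; the container, the ids and the helper
   name are assumptions made purely for illustration):

     #include <set>

     struct toy_bitmap_set
     {
       std::set<unsigned> values;       // value-ids present in the set
       std::set<unsigned> expressions;  // expression-ids present in the set
     };

     // Insert EXPR_ID only if its value VAL_ID is not yet represented,
     // mirroring what bitmap_value_insert_into_set does further down.
     static void
     toy_value_insert (toy_bitmap_set &s, unsigned expr_id, unsigned val_id)
     {
       if (s.values.insert (val_id).second)
         s.expressions.insert (expr_id);
     }  */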
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */
inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const value_type *ve1,
			  const compare_type *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
575 /* Add expression E to the expression set of value id V. */
578 add_to_value (unsigned int v
, pre_expr e
)
582 gcc_checking_assert (get_expr_value_id (e
) == v
);
584 if (v
>= value_expressions
.length ())
586 value_expressions
.safe_grow_cleared (v
+ 1);
589 set
= value_expressions
[v
];
592 set
= BITMAP_ALLOC (&grand_bitmap_obstack
);
593 value_expressions
[v
] = set
;
596 bitmap_set_bit (set
, get_or_alloc_expression_id (e
));
599 /* Create a new bitmap set and return it. */
602 bitmap_set_new (void)
604 bitmap_set_t ret
= (bitmap_set_t
) pool_alloc (bitmap_set_pool
);
605 bitmap_initialize (&ret
->expressions
, &grand_bitmap_obstack
);
606 bitmap_initialize (&ret
->values
, &grand_bitmap_obstack
);
610 /* Return the value id for a PRE expression EXPR. */
613 get_expr_value_id (pre_expr expr
)
619 id
= get_constant_value_id (PRE_EXPR_CONSTANT (expr
));
622 id
= VN_INFO (PRE_EXPR_NAME (expr
))->value_id
;
625 id
= PRE_EXPR_NARY (expr
)->value_id
;
628 id
= PRE_EXPR_REFERENCE (expr
)->value_id
;
633 /* ??? We cannot assert that expr has a value-id (it can be 0), because
634 we assign value-ids only to expressions that have a result
635 in set_hashtable_value_ids. */
639 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
642 sccvn_valnum_from_value_id (unsigned int val
)
646 bitmap exprset
= value_expressions
[val
];
647 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
649 pre_expr vexpr
= expression_for_id (i
);
650 if (vexpr
->kind
== NAME
)
651 return VN_INFO (PRE_EXPR_NAME (vexpr
))->valnum
;
652 else if (vexpr
->kind
== CONSTANT
)
653 return PRE_EXPR_CONSTANT (vexpr
);
658 /* Remove an expression EXPR from a bitmapped set. */
661 bitmap_remove_from_set (bitmap_set_t set
, pre_expr expr
)
663 unsigned int val
= get_expr_value_id (expr
);
664 if (!value_id_constant_p (val
))
666 bitmap_clear_bit (&set
->values
, val
);
667 bitmap_clear_bit (&set
->expressions
, get_expression_id (expr
));
672 bitmap_insert_into_set_1 (bitmap_set_t set
, pre_expr expr
,
673 unsigned int val
, bool allow_constants
)
675 if (allow_constants
|| !value_id_constant_p (val
))
677 /* We specifically expect this and only this function to be able to
678 insert constants into a set. */
679 bitmap_set_bit (&set
->values
, val
);
680 bitmap_set_bit (&set
->expressions
, get_or_alloc_expression_id (expr
));
684 /* Insert an expression EXPR into a bitmapped set. */
687 bitmap_insert_into_set (bitmap_set_t set
, pre_expr expr
)
689 bitmap_insert_into_set_1 (set
, expr
, get_expr_value_id (expr
), false);
692 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
695 bitmap_set_copy (bitmap_set_t dest
, bitmap_set_t orig
)
697 bitmap_copy (&dest
->expressions
, &orig
->expressions
);
698 bitmap_copy (&dest
->values
, &orig
->values
);
702 /* Free memory used up by SET. */
704 bitmap_set_free (bitmap_set_t set
)
706 bitmap_clear (&set
->expressions
);
707 bitmap_clear (&set
->values
);
/* Generate a topologically ordered array of bitmap set SET.  */
714 sorted_array_from_bitmap_set (bitmap_set_t set
)
717 bitmap_iterator bi
, bj
;
718 vec
<pre_expr
> result
;
720 /* Pre-allocate enough space for the array. */
721 result
.create (bitmap_count_bits (&set
->expressions
));
723 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
735 bitmap exprset
= value_expressions
[i
];
736 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
738 if (bitmap_bit_p (&set
->expressions
, j
))
739 result
.quick_push (expression_for_id (j
));
746 /* Perform bitmapped set operation DEST &= ORIG. */
749 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
757 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
759 bitmap_and_into (&dest
->values
, &orig
->values
);
760 bitmap_copy (&temp
, &dest
->expressions
);
761 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
763 pre_expr expr
= expression_for_id (i
);
764 unsigned int value_id
= get_expr_value_id (expr
);
765 if (!bitmap_bit_p (&dest
->values
, value_id
))
766 bitmap_clear_bit (&dest
->expressions
, i
);
768 bitmap_clear (&temp
);
772 /* Subtract all values and expressions contained in ORIG from DEST. */
775 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
777 bitmap_set_t result
= bitmap_set_new ();
781 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
784 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
786 pre_expr expr
= expression_for_id (i
);
787 unsigned int value_id
= get_expr_value_id (expr
);
788 bitmap_set_bit (&result
->values
, value_id
);
794 /* Subtract all the values in bitmap set B from bitmap set A. */
797 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
803 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
805 bitmap_copy (&temp
, &a
->expressions
);
806 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
808 pre_expr expr
= expression_for_id (i
);
809 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
810 bitmap_remove_from_set (a
, expr
);
812 bitmap_clear (&temp
);
816 /* Return true if bitmapped set SET contains the value VALUE_ID. */
819 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
821 if (value_id_constant_p (value_id
))
824 if (!set
|| bitmap_empty_p (&set
->expressions
))
827 return bitmap_bit_p (&set
->values
, value_id
);
831 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
833 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
836 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
839 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
846 if (value_id_constant_p (lookfor
))
849 if (!bitmap_set_contains_value (set
, lookfor
))
  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
861 exprset
= value_expressions
[lookfor
];
862 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
864 if (bitmap_clear_bit (&set
->expressions
, i
))
866 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
874 /* Return true if two bitmap sets are equal. */
877 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
879 return bitmap_equal_p (&a
->values
, &b
->values
);
882 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
883 and add it otherwise. */
886 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
888 unsigned int val
= get_expr_value_id (expr
);
890 if (bitmap_set_contains_value (set
, val
))
891 bitmap_set_replace_value (set
, val
, expr
);
893 bitmap_insert_into_set (set
, expr
);
896 /* Insert EXPR into SET if EXPR's value is not already present in
900 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
902 unsigned int val
= get_expr_value_id (expr
);
904 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
906 /* Constant values are always considered to be part of the set. */
907 if (value_id_constant_p (val
))
910 /* If the value membership changed, add the expression. */
911 if (bitmap_set_bit (&set
->values
, val
))
912 bitmap_set_bit (&set
->expressions
, expr
->id
);
915 /* Print out EXPR to outfile. */
918 print_pre_expr (FILE *outfile
, const pre_expr expr
)
923 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
926 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
931 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
932 fprintf (outfile
, "{%s,", get_tree_code_name (nary
->opcode
));
933 for (i
= 0; i
< nary
->length
; i
++)
935 print_generic_expr (outfile
, nary
->op
[i
], 0);
936 if (i
!= (unsigned) nary
->length
- 1)
937 fprintf (outfile
, ",");
939 fprintf (outfile
, "}");
945 vn_reference_op_t vro
;
947 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
948 fprintf (outfile
, "{");
950 ref
->operands
.iterate (i
, &vro
);
953 bool closebrace
= false;
954 if (vro
->opcode
!= SSA_NAME
955 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
957 fprintf (outfile
, "%s", get_tree_code_name (vro
->opcode
));
960 fprintf (outfile
, "<");
966 print_generic_expr (outfile
, vro
->op0
, 0);
969 fprintf (outfile
, ",");
970 print_generic_expr (outfile
, vro
->op1
, 0);
974 fprintf (outfile
, ",");
975 print_generic_expr (outfile
, vro
->op2
, 0);
979 fprintf (outfile
, ">");
980 if (i
!= ref
->operands
.length () - 1)
981 fprintf (outfile
, ",");
983 fprintf (outfile
, "}");
986 fprintf (outfile
, "@");
987 print_generic_expr (outfile
, ref
->vuse
, 0);
993 void debug_pre_expr (pre_expr
);
995 /* Like print_pre_expr but always prints to stderr. */
997 debug_pre_expr (pre_expr e
)
999 print_pre_expr (stderr
, e
);
1000 fprintf (stderr
, "\n");
1003 /* Print out SET to OUTFILE. */
1006 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1007 const char *setname
, int blockindex
)
1009 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1016 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1018 const pre_expr expr
= expression_for_id (i
);
1021 fprintf (outfile
, ", ");
1023 print_pre_expr (outfile
, expr
);
1025 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1028 fprintf (outfile
, " }\n");
1031 void debug_bitmap_set (bitmap_set_t
);
1034 debug_bitmap_set (bitmap_set_t set
)
1036 print_bitmap_set (stderr
, set
, "debug", 0);
1039 void debug_bitmap_sets_for (basic_block
);
1042 debug_bitmap_sets_for (basic_block bb
)
1044 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1045 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1046 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1047 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1048 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1049 if (do_partial_partial
)
1050 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1051 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1054 /* Print out the expressions that have VAL to OUTFILE. */
1057 print_value_expressions (FILE *outfile
, unsigned int val
)
1059 bitmap set
= value_expressions
[val
];
1064 sprintf (s
, "%04d", val
);
1065 x
.expressions
= *set
;
1066 print_bitmap_set (outfile
, &x
, s
, 0);
1072 debug_value_expressions (unsigned int val
)
1074 print_value_expressions (stderr
, val
);
1077 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1081 get_or_alloc_expr_for_constant (tree constant
)
1083 unsigned int result_id
;
1084 unsigned int value_id
;
1085 struct pre_expr_d expr
;
1088 expr
.kind
= CONSTANT
;
1089 PRE_EXPR_CONSTANT (&expr
) = constant
;
1090 result_id
= lookup_expression_id (&expr
);
1092 return expression_for_id (result_id
);
1094 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1095 newexpr
->kind
= CONSTANT
;
1096 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1097 alloc_expression_id (newexpr
);
1098 value_id
= get_or_alloc_constant_value_id (constant
);
1099 add_to_value (value_id
, newexpr
);
1103 /* Given a value id V, find the actual tree representing the constant
1104 value if there is one, and return it. Return NULL if we can't find
1108 get_constant_for_value_id (unsigned int v
)
1110 if (value_id_constant_p (v
))
1114 bitmap exprset
= value_expressions
[v
];
1116 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1118 pre_expr expr
= expression_for_id (i
);
1119 if (expr
->kind
== CONSTANT
)
1120 return PRE_EXPR_CONSTANT (expr
);
1126 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1127 Currently only supports constants and SSA_NAMES. */
1129 get_or_alloc_expr_for (tree t
)
1131 if (TREE_CODE (t
) == SSA_NAME
)
1132 return get_or_alloc_expr_for_name (t
);
1133 else if (is_gimple_min_invariant (t
))
1134 return get_or_alloc_expr_for_constant (t
);
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
1140 vn_nary_op_t result
;
1141 unsigned int result_id
;
1142 vn_nary_op_lookup (t
, &result
);
1145 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1147 PRE_EXPR_NARY (e
) = result
;
1148 result_id
= lookup_expression_id (e
);
1151 pool_free (pre_expr_pool
, e
);
1152 e
= expression_for_id (result_id
);
1155 alloc_expression_id (e
);
1162 /* Return the folded version of T if T, when folded, is a gimple
1163 min_invariant. Otherwise, return T. */
1166 fully_constant_expression (pre_expr e
)
1174 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1175 switch (TREE_CODE_CLASS (nary
->opcode
))
1178 case tcc_comparison
:
1180 /* We have to go from trees to pre exprs to value ids to
1182 tree naryop0
= nary
->op
[0];
1183 tree naryop1
= nary
->op
[1];
1185 if (!is_gimple_min_invariant (naryop0
))
1187 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1188 unsigned int vrep0
= get_expr_value_id (rep0
);
1189 tree const0
= get_constant_for_value_id (vrep0
);
1191 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1193 if (!is_gimple_min_invariant (naryop1
))
1195 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1196 unsigned int vrep1
= get_expr_value_id (rep1
);
1197 tree const1
= get_constant_for_value_id (vrep1
);
1199 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1201 result
= fold_binary (nary
->opcode
, nary
->type
,
1203 if (result
&& is_gimple_min_invariant (result
))
1204 return get_or_alloc_expr_for_constant (result
);
1205 /* We might have simplified the expression to a
1206 SSA_NAME for example from x_1 * 1. But we cannot
1207 insert a PHI for x_1 unconditionally as x_1 might
1208 not be available readily. */
1212 if (nary
->opcode
!= REALPART_EXPR
1213 && nary
->opcode
!= IMAGPART_EXPR
1214 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1219 /* We have to go from trees to pre exprs to value ids to
1221 tree naryop0
= nary
->op
[0];
1222 tree const0
, result
;
1223 if (is_gimple_min_invariant (naryop0
))
1227 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1228 unsigned int vrep0
= get_expr_value_id (rep0
);
1229 const0
= get_constant_for_value_id (vrep0
);
1234 tree type1
= TREE_TYPE (nary
->op
[0]);
1235 const0
= fold_convert (type1
, const0
);
1236 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1238 if (result
&& is_gimple_min_invariant (result
))
1239 return get_or_alloc_expr_for_constant (result
);
1248 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1250 if ((folded
= fully_constant_vn_reference_p (ref
)))
1251 return get_or_alloc_expr_for_constant (folded
);
1260 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1261 it has the value it would have in BLOCK. Set *SAME_VALID to true
1262 in case the new vuse doesn't change the value id of the OPERANDS. */
1265 translate_vuse_through_block (vec
<vn_reference_op_s
> operands
,
1266 alias_set_type set
, tree type
, tree vuse
,
1267 basic_block phiblock
,
1268 basic_block block
, bool *same_valid
)
1270 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1277 if (gimple_bb (phi
) != phiblock
)
1280 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1282 /* Use the alias-oracle to find either the PHI node in this block,
1283 the first VUSE used in this block that is equivalent to vuse or
1284 the first VUSE which definition in this block kills the value. */
1285 if (gimple_code (phi
) == GIMPLE_PHI
)
1286 e
= find_edge (block
, phiblock
);
1287 else if (use_oracle
)
1288 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1290 vuse
= gimple_vuse (phi
);
1291 phi
= SSA_NAME_DEF_STMT (vuse
);
1292 if (gimple_bb (phi
) != phiblock
)
1294 if (gimple_code (phi
) == GIMPLE_PHI
)
1296 e
= find_edge (block
, phiblock
);
1307 bitmap visited
= NULL
;
1309 /* Try to find a vuse that dominates this phi node by skipping
1310 non-clobbering statements. */
1311 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false,
1314 BITMAP_FREE (visited
);
1320 /* If we didn't find any, the value ID can't stay the same,
1321 but return the translated vuse. */
1322 *same_valid
= false;
1323 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1325 /* ??? We would like to return vuse here as this is the canonical
1326 upmost vdef that this reference is associated with. But during
1327 insertion of the references into the hash tables we only ever
1328 directly insert with their direct gimple_vuse, hence returning
1329 something else would make us not find the other expression. */
1330 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1336 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1337 SET2. This is used to avoid making a set consisting of the union
1338 of PA_IN and ANTIC_IN during insert. */
1340 static inline pre_expr
1341 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1345 result
= bitmap_find_leader (set1
, val
);
1346 if (!result
&& set2
)
1347 result
= bitmap_find_leader (set2
, val
);
1351 /* Get the tree type for our PRE expression e. */
1354 get_expr_type (const pre_expr e
)
1359 return TREE_TYPE (PRE_EXPR_NAME (e
));
1361 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1363 return PRE_EXPR_REFERENCE (e
)->type
;
1365 return PRE_EXPR_NARY (e
)->type
;
1370 /* Get a representative SSA_NAME for a given expression.
1371 Since all of our sub-expressions are treated as values, we require
1372 them to be SSA_NAME's for simplicity.
1373 Prior versions of GVNPRE used to use "value handles" here, so that
1374 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1375 either case, the operands are really values (IE we do not expect
1376 them to be usable without finding leaders). */
1379 get_representative_for (const pre_expr e
)
1382 unsigned int value_id
= get_expr_value_id (e
);
1387 return PRE_EXPR_NAME (e
);
1389 return PRE_EXPR_CONSTANT (e
);
1393 /* Go through all of the expressions representing this value
1394 and pick out an SSA_NAME. */
1397 bitmap exprs
= value_expressions
[value_id
];
1398 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1400 pre_expr rep
= expression_for_id (i
);
1401 if (rep
->kind
== NAME
)
1402 return PRE_EXPR_NAME (rep
);
1403 else if (rep
->kind
== CONSTANT
)
1404 return PRE_EXPR_CONSTANT (rep
);
1410 /* If we reached here we couldn't find an SSA_NAME. This can
1411 happen when we've discovered a value that has never appeared in
1412 the program as set to an SSA_NAME, as the result of phi translation.
1414 ??? We should be able to re-use this when we insert the statement
1416 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1417 VN_INFO_GET (name
)->value_id
= value_id
;
1418 VN_INFO (name
)->valnum
= name
;
1419 /* ??? For now mark this SSA name for release by SCCVN. */
1420 VN_INFO (name
)->needs_insertion
= true;
1421 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1422 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1424 fprintf (dump_file
, "Created SSA_NAME representative ");
1425 print_generic_expr (dump_file
, name
, 0);
1426 fprintf (dump_file
, " for expression:");
1427 print_pre_expr (dump_file
, e
);
1428 fprintf (dump_file
, " (%04d)\n", value_id
);
1437 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1438 basic_block pred
, basic_block phiblock
);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
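
/* An illustrative example (the SSA names are hypothetical): if PHIBLOCK
   contains

     x_3 = PHI <x_1(pred1), x_2(pred2)>

   then translating the NARY expression {plus_expr,x_3,1} through PHIBLOCK
   towards pred1 yields {plus_expr,x_1,1}, i.e. the expression is rewritten
   in terms of the values it would have on entry from pred1.  Memory
   references are additionally translated through the virtual operand PHI
   via translate_vuse_through_block above.  */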
1445 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1446 basic_block pred
, basic_block phiblock
)
1453 bool changed
= false;
1454 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1455 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1456 sizeof_vn_nary_op (nary
->length
));
1457 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1459 for (i
= 0; i
< newnary
->length
; i
++)
1461 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1465 pre_expr leader
, result
;
1466 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1467 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1468 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1469 if (result
&& result
!= leader
)
1471 tree name
= get_representative_for (result
);
1474 newnary
->op
[i
] = name
;
1479 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1485 unsigned int new_val_id
;
1487 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1492 if (result
&& is_gimple_min_invariant (result
))
1493 return get_or_alloc_expr_for_constant (result
);
1495 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1500 PRE_EXPR_NARY (expr
) = nary
;
1501 constant
= fully_constant_expression (expr
);
1502 if (constant
!= expr
)
1505 new_val_id
= nary
->value_id
;
1506 get_or_alloc_expression_id (expr
);
1510 new_val_id
= get_next_value_id ();
1511 value_expressions
.safe_grow_cleared (get_max_value_id () + 1);
1512 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1516 result
, new_val_id
);
1517 PRE_EXPR_NARY (expr
) = nary
;
1518 constant
= fully_constant_expression (expr
);
1519 if (constant
!= expr
)
1521 get_or_alloc_expression_id (expr
);
1523 add_to_value (new_val_id
, expr
);
1531 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1532 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1533 tree vuse
= ref
->vuse
;
1534 tree newvuse
= vuse
;
1535 vec
<vn_reference_op_s
> newoperands
= vNULL
;
1536 bool changed
= false, same_valid
= true;
1538 vn_reference_op_t operand
;
1539 vn_reference_t newref
;
1541 for (i
= 0; operands
.iterate (i
, &operand
); i
++)
1546 tree type
= operand
->type
;
1547 vn_reference_op_s newop
= *operand
;
1548 op
[0] = operand
->op0
;
1549 op
[1] = operand
->op1
;
1550 op
[2] = operand
->op2
;
1551 for (n
= 0; n
< 3; ++n
)
1553 unsigned int op_val_id
;
1556 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1558 /* We can't possibly insert these. */
1560 && !is_gimple_min_invariant (op
[n
]))
1564 op_val_id
= VN_INFO (op
[n
])->value_id
;
1565 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1568 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1571 if (opresult
!= leader
)
1573 tree name
= get_representative_for (opresult
);
1576 changed
|= name
!= op
[n
];
1582 newoperands
.release ();
1587 if (!newoperands
.exists ())
1588 newoperands
= operands
.copy ();
1589 /* We may have changed from an SSA_NAME to a constant */
1590 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1591 newop
.opcode
= TREE_CODE (op
[0]);
1596 newoperands
[i
] = newop
;
1598 gcc_checking_assert (i
== operands
.length ());
1602 newvuse
= translate_vuse_through_block (newoperands
.exists ()
1603 ? newoperands
: operands
,
1604 ref
->set
, ref
->type
,
1605 vuse
, phiblock
, pred
,
1607 if (newvuse
== NULL_TREE
)
1609 newoperands
.release ();
1614 if (changed
|| newvuse
!= vuse
)
1616 unsigned int new_val_id
;
1619 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1621 newoperands
.exists ()
1622 ? newoperands
: operands
,
1625 newoperands
.release ();
1627 /* We can always insert constants, so if we have a partial
1628 redundant constant load of another type try to translate it
1629 to a constant of appropriate type. */
1630 if (result
&& is_gimple_min_invariant (result
))
1633 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1635 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1636 if (tem
&& !is_gimple_min_invariant (tem
))
1640 return get_or_alloc_expr_for_constant (tem
);
1643 /* If we'd have to convert things we would need to validate
1644 if we can insert the translated expression. So fail
1645 here for now - we cannot insert an alias with a different
1646 type in the VN tables either, as that would assert. */
1648 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1650 else if (!result
&& newref
1651 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1653 newoperands
.release ();
1657 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1658 expr
->kind
= REFERENCE
;
1663 PRE_EXPR_REFERENCE (expr
) = newref
;
1664 constant
= fully_constant_expression (expr
);
1665 if (constant
!= expr
)
1668 new_val_id
= newref
->value_id
;
1669 get_or_alloc_expression_id (expr
);
1673 if (changed
|| !same_valid
)
1675 new_val_id
= get_next_value_id ();
1676 value_expressions
.safe_grow_cleared
1677 (get_max_value_id () + 1);
1680 new_val_id
= ref
->value_id
;
1681 if (!newoperands
.exists ())
1682 newoperands
= operands
.copy ();
1683 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1686 result
, new_val_id
);
1687 newoperands
= vNULL
;
1688 PRE_EXPR_REFERENCE (expr
) = newref
;
1689 constant
= fully_constant_expression (expr
);
1690 if (constant
!= expr
)
1692 get_or_alloc_expression_id (expr
);
1694 add_to_value (new_val_id
, expr
);
1696 newoperands
.release ();
1703 tree name
= PRE_EXPR_NAME (expr
);
1704 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
1705 /* If the SSA name is defined by a PHI node in this block,
1707 if (gimple_code (def_stmt
) == GIMPLE_PHI
1708 && gimple_bb (def_stmt
) == phiblock
)
1710 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1711 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1713 /* Handle constant. */
1714 if (is_gimple_min_invariant (def
))
1715 return get_or_alloc_expr_for_constant (def
);
1717 return get_or_alloc_expr_for_name (def
);
1719 /* Otherwise return it unchanged - it will get removed if its
1720 value is not available in PREDs AVAIL_OUT set of expressions
1721 by the subtraction of TMP_GEN. */
1730 /* Wrapper around phi_translate_1 providing caching functionality. */
1733 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1734 basic_block pred
, basic_block phiblock
)
1736 expr_pred_trans_t slot
= NULL
;
1742 /* Constants contain no values that need translation. */
1743 if (expr
->kind
== CONSTANT
)
1746 if (value_id_constant_p (get_expr_value_id (expr
)))
1749 /* Don't add translations of NAMEs as those are cheap to translate. */
1750 if (expr
->kind
!= NAME
)
1752 if (phi_trans_add (&slot
, expr
, pred
))
1754 /* Store NULL for the value we want to return in the case of
1760 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1767 /* Remove failed translations again, they cause insert
1768 iteration to not pick up new opportunities reliably. */
1769 phi_translate_table
->remove_elt_with_hash (slot
, slot
->hashcode
);
1776 /* For each expression in SET, translate the values through phi nodes
1777 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1778 expressions in DEST. */
1781 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1782 basic_block phiblock
)
1784 vec
<pre_expr
> exprs
;
1788 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1790 bitmap_set_copy (dest
, set
);
1794 exprs
= sorted_array_from_bitmap_set (set
);
1795 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
1797 pre_expr translated
;
1798 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1802 /* We might end up with multiple expressions from SET being
1803 translated to the same value. In this case we do not want
1804 to retain the NARY or REFERENCE expression but prefer a NAME
1805 which would be the leader. */
1806 if (translated
->kind
== NAME
)
1807 bitmap_value_replace_in_set (dest
, translated
);
1809 bitmap_value_insert_into_set (dest
, translated
);
1814 /* Find the leader for a value (i.e., the name representing that
1815 value) in a given set, and return it. Return NULL if no leader
1819 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1821 if (value_id_constant_p (val
))
1825 bitmap exprset
= value_expressions
[val
];
1827 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1829 pre_expr expr
= expression_for_id (i
);
1830 if (expr
->kind
== CONSTANT
)
1834 if (bitmap_set_contains_value (set
, val
))
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
1849 bitmap exprset
= value_expressions
[val
];
1851 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1852 return expression_for_id (i
);
1857 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1858 BLOCK by seeing if it is not killed in the block. Note that we are
1859 only determining whether there is a store that kills it. Because
1860 of the order in which clean iterates over values, we are guaranteed
1861 that altered operands will have caused us to be eliminated from the
1862 ANTIC_IN set already. */
1865 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1867 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1868 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1870 gimple_stmt_iterator gsi
;
1871 unsigned id
= get_expression_id (expr
);
1878 /* Lookup a previously calculated result. */
1879 if (EXPR_DIES (block
)
1880 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1881 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting the statement walk from
     the top of the basic block, a statement uses VUSE, there can be no
     kill in between that use and the original statement that loaded
     {e, VUSE}, so we can stop walking.  */
1888 ref
.base
= NULL_TREE
;
1889 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1891 tree def_vuse
, def_vdef
;
1892 def
= gsi_stmt (gsi
);
1893 def_vuse
= gimple_vuse (def
);
1894 def_vdef
= gimple_vdef (def
);
1896 /* Not a memory statement. */
1900 /* Not a may-def. */
1903 /* A load with the same VUSE, we're done. */
1904 if (def_vuse
== vuse
)
1910 /* Init ref only if we really need it. */
1911 if (ref
.base
== NULL_TREE
1912 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1918 /* If the statement may clobber expr, it dies. */
1919 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1926 /* Remember the result. */
1927 if (!EXPR_DIES (block
))
1928 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1929 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1931 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1937 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1938 contains its value-id. */
1941 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1943 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1945 unsigned int value_id
= VN_INFO (op
)->value_id
;
1946 if (!(bitmap_set_contains_value (set1
, value_id
)
1947 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1953 /* Determine if the expression EXPR is valid in SET1 U SET2.
1954 ONLY SET2 CAN BE NULL.
1955 This means that we have a leader for each part of the expression
1956 (if it consists of values), or the expression is an SSA_NAME.
1957 For loads/calls, we also see if the vuse is killed in this block. */
1960 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
)
1965 /* By construction all NAMEs are available. Non-available
1966 NAMEs are removed by subtracting TMP_GEN from the sets. */
1971 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1972 for (i
= 0; i
< nary
->length
; i
++)
1973 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
1980 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1981 vn_reference_op_t vro
;
1984 FOR_EACH_VEC_ELT (ref
->operands
, i
, vro
)
1986 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
1987 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
1988 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
1998 /* Clean the set of expressions that are no longer valid in SET1 or
1999 SET2. This means expressions that are made up of values we have no
2000 leaders for in SET1 or SET2. This version is used for partial
2001 anticipation, which means it is not valid in either ANTIC_IN or
2005 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
)
2007 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set1
);
2011 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2013 if (!valid_in_sets (set1
, set2
, expr
))
2014 bitmap_remove_from_set (set1
, expr
);
2019 /* Clean the set of expressions that are no longer valid in SET. This
2020 means expressions that are made up of values we have no leaders for
2024 clean (bitmap_set_t set
)
2026 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set
);
2030 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2032 if (!valid_in_sets (set
, NULL
, expr
))
2033 bitmap_remove_from_set (set
, expr
);
2038 /* Clean the set of expressions that are no longer valid in SET because
2039 they are clobbered in BLOCK or because they trap and may not be executed. */
2042 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2047 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2049 pre_expr expr
= expression_for_id (i
);
2050 if (expr
->kind
== REFERENCE
)
2052 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2055 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2056 if (!gimple_nop_p (def_stmt
)
2057 && ((gimple_bb (def_stmt
) != block
2058 && !dominated_by_p (CDI_DOMINATORS
,
2059 block
, gimple_bb (def_stmt
)))
2060 || (gimple_bb (def_stmt
) == block
2061 && value_dies_in_block_x (expr
, block
))))
2062 bitmap_remove_from_set (set
, expr
);
2065 else if (expr
->kind
== NARY
)
2067 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2068 /* If the NARY may trap make sure the block does not contain
2069 a possible exit point.
2070 ??? This is overly conservative if we translate AVAIL_OUT
2071 as the available expression might be after the exit point. */
2072 if (BB_MAY_NOTRETURN (block
)
2073 && vn_nary_may_trap (nary
))
2074 bitmap_remove_from_set (set
, expr
);
2079 static sbitmap has_abnormal_preds
;
2081 /* List of blocks that may have changed during ANTIC computation and
2082 thus need to be iterated over. */
2084 static sbitmap changed_blocks
;
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
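
/* A stand-alone sketch of this backward dataflow problem on a toy CFG,
   ignoring phi translation, value-ids and clean () (all of which the real
   code below handles); the types and helper are illustrative only:

     #include <algorithm>
     #include <iterator>
     #include <set>
     #include <vector>

     typedef std::set<int> expr_set;	// expression ids

     struct toy_block
     {
       std::vector<int> succs;		// successor block indices
       expr_set exp_gen, tmp_gen, antic_in;
     };

     // One update of ANTIC_IN for block B; returns true if it changed.
     // Iterating this over the blocks in postorder until nothing changes
     // reaches the same kind of fixpoint compute_antic iterates towards.
     static bool
     toy_compute_antic_aux (std::vector<toy_block> &cfg, int b)
     {
       expr_set antic_out;
       if (!cfg[b].succs.empty ())
         {
           antic_out = cfg[cfg[b].succs[0]].antic_in;
           for (size_t i = 1; i < cfg[b].succs.size (); i++)
             {
               const expr_set &other = cfg[cfg[b].succs[i]].antic_in;
               expr_set isect;
               std::set_intersection (antic_out.begin (), antic_out.end (),
                                      other.begin (), other.end (),
                                      std::inserter (isect, isect.begin ()));
               antic_out = isect;
             }
         }
       // ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN
       expr_set uni = antic_out;
       uni.insert (cfg[b].exp_gen.begin (), cfg[b].exp_gen.end ());
       expr_set in;
       std::set_difference (uni.begin (), uni.end (),
                            cfg[b].tmp_gen.begin (), cfg[b].tmp_gen.end (),
                            std::inserter (in, in.begin ()));
       bool changed = in != cfg[b].antic_in;
       cfg[b].antic_in = in;
       return changed;
     }  */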
2097 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2099 bool changed
= false;
2100 bitmap_set_t S
, old
, ANTIC_OUT
;
2106 old
= ANTIC_OUT
= S
= NULL
;
2107 BB_VISITED (block
) = 1;
2109 /* If any edges from predecessors are abnormal, antic_in is empty,
2111 if (block_has_abnormal_pred_edge
)
2112 goto maybe_dump_sets
;
2114 old
= ANTIC_IN (block
);
2115 ANTIC_OUT
= bitmap_set_new ();
2117 /* If the block has no successors, ANTIC_OUT is empty. */
2118 if (EDGE_COUNT (block
->succs
) == 0)
2120 /* If we have one successor, we could have some phi nodes to
2121 translate through. */
2122 else if (single_succ_p (block
))
2124 basic_block succ_bb
= single_succ (block
);
2125 gcc_assert (BB_VISITED (succ_bb
));
2126 phi_translate_set (ANTIC_OUT
, ANTIC_IN (succ_bb
), block
, succ_bb
);
2128 /* If we have multiple successors, we take the intersection of all of
2129 them. Note that in the case of loop exit phi nodes, we may have
2130 phis to translate through. */
2134 basic_block bprime
, first
= NULL
;
2136 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2137 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2140 && BB_VISITED (e
->dest
))
2142 else if (BB_VISITED (e
->dest
))
2143 worklist
.quick_push (e
->dest
);
2146 /* Of multiple successors we have to have visited one already
2147 which is guaranteed by iteration order. */
2148 gcc_assert (first
!= NULL
);
2150 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2152 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2154 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2156 bitmap_set_t tmp
= bitmap_set_new ();
2157 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2158 bitmap_set_and (ANTIC_OUT
, tmp
);
2159 bitmap_set_free (tmp
);
2162 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2166 /* Prune expressions that are clobbered in block and thus become
2167 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2168 prune_clobbered_mems (ANTIC_OUT
, block
);
2170 /* Generate ANTIC_OUT - TMP_GEN. */
2171 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2173 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2174 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2177 /* Then union in the ANTIC_OUT - TMP_GEN values,
2178 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2179 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2180 bitmap_value_insert_into_set (ANTIC_IN (block
),
2181 expression_for_id (bii
));
2183 clean (ANTIC_IN (block
));
2185 if (!bitmap_set_equal (old
, ANTIC_IN (block
)))
2188 bitmap_set_bit (changed_blocks
, block
->index
);
2189 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2190 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2193 bitmap_clear_bit (changed_blocks
, block
->index
);
2196 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2199 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2201 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2205 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2208 bitmap_set_free (old
);
2210 bitmap_set_free (S
);
2212 bitmap_set_free (ANTIC_OUT
);
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])
*/
2229 compute_partial_antic_aux (basic_block block
,
2230 bool block_has_abnormal_pred_edge
)
2232 bool changed
= false;
2233 bitmap_set_t old_PA_IN
;
2234 bitmap_set_t PA_OUT
;
2237 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2239 old_PA_IN
= PA_OUT
= NULL
;
2241 /* If any edges from predecessors are abnormal, antic_in is empty,
2243 if (block_has_abnormal_pred_edge
)
2244 goto maybe_dump_sets
;
2246 /* If there are too many partially anticipatable values in the
2247 block, phi_translate_set can take an exponential time: stop
2248 before the translation starts. */
2250 && single_succ_p (block
)
2251 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2252 goto maybe_dump_sets
;
2254 old_PA_IN
= PA_IN (block
);
2255 PA_OUT
= bitmap_set_new ();
2257 /* If the block has no successors, ANTIC_OUT is empty. */
2258 if (EDGE_COUNT (block
->succs
) == 0)
2260 /* If we have one successor, we could have some phi nodes to
2261 translate through. Note that we can't phi translate across DFS
2262 back edges in partial antic, because it uses a union operation on
2263 the successors. For recurrences like IV's, we will end up
2264 generating a new value in the set on each go around (i + 3 (VH.1)
2265 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2266 else if (single_succ_p (block
))
2268 basic_block succ
= single_succ (block
);
2269 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2270 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2272 /* If we have multiple successors, we take the union of all of
2279 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2280 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2282 if (e
->flags
& EDGE_DFS_BACK
)
2284 worklist
.quick_push (e
->dest
);
2286 if (worklist
.length () > 0)
2288 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2293 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2294 bitmap_value_insert_into_set (PA_OUT
,
2295 expression_for_id (i
));
2296 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2298 bitmap_set_t pa_in
= bitmap_set_new ();
2299 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2300 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2301 bitmap_value_insert_into_set (PA_OUT
,
2302 expression_for_id (i
));
2303 bitmap_set_free (pa_in
);
2306 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2307 bitmap_value_insert_into_set (PA_OUT
,
2308 expression_for_id (i
));
2313 /* Prune expressions that are clobbered in block and thus become
2314 invalid if translated from PA_OUT to PA_IN. */
2315 prune_clobbered_mems (PA_OUT
, block
);
2317 /* PA_IN starts with PA_OUT - TMP_GEN.
2318 Then we subtract things from ANTIC_IN. */
2319 PA_IN (block
) = bitmap_set_subtract (PA_OUT
, TMP_GEN (block
));
2321 /* For partial antic, we want to put back in the phi results, since
2322 we will properly avoid making them partially antic over backedges. */
2323 bitmap_ior_into (&PA_IN (block
)->values
, &PHI_GEN (block
)->values
);
2324 bitmap_ior_into (&PA_IN (block
)->expressions
, &PHI_GEN (block
)->expressions
);
2326 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2327 bitmap_set_subtract_values (PA_IN (block
), ANTIC_IN (block
));
2329 dependent_clean (PA_IN (block
), ANTIC_IN (block
));
2331 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2334 bitmap_set_bit (changed_blocks
, block
->index
);
2335 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2336 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2339 bitmap_clear_bit (changed_blocks
, block
->index
);
2342 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2345 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2347 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2350 bitmap_set_free (old_PA_IN
);
2352 bitmap_set_free (PA_OUT
);
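/* For illustration only (a made-up source fragment, not taken from the
   pass itself): in

     if (flag)
       t1 = x + y;
     else
       t2 = 0;

   x + y is computed on only one successor path of the condition block,
   so it is not in ANTIC_IN there but it does land in PA_IN.  Such values
   are the candidates for the speculative insertions performed by
   do_partial_partial_insertion further down.  */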
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
        {
          e->flags &= ~EDGE_DFS_BACK;
          if (e->flags & EDGE_ABNORMAL)
            {
              bitmap_set_bit (has_abnormal_preds, block->index);
              break;
            }
        }

      BB_VISITED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ??? We need to clear our PHI translation cache here as the
         ANTIC sets shrink and we restrict valid translations to
         those having operands with leaders in ANTIC.  Same below
         for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
        {
          if (bitmap_bit_p (changed_blocks, postorder[i]))
            {
              basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
              changed |= compute_antic_aux (block,
                                            bitmap_bit_p (has_abnormal_preds,
                                                          block->index));
            }
        }
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
                              num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Starting iteration %d\n", num_iterations);
          num_iterations++;
          changed = false;
          for (i = postorder_num - 1 ; i >= 0; i--)
            {
              if (bitmap_bit_p (changed_blocks, postorder[i]))
                {
                  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
                  changed
                    |= compute_partial_antic_aux (block,
                                                  bitmap_bit_p (has_abnormal_preds,
                                                                block->index));
                }
            }
          /* Theoretically possible, but *highly* unlikely.  */
          gcc_checking_assert (num_iterations < 500);
        }
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
                                  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;
2464 /* The actual worker for create_component_ref_by_pieces. */
2467 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2468 unsigned int *operand
, gimple_seq
*stmts
)
2470 vn_reference_op_t currop
= &ref
->operands
[*operand
];
2473 switch (currop
->opcode
)
2477 tree folded
, sc
= NULL_TREE
;
2478 unsigned int nargs
= 0;
2480 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2483 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2488 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2492 args
= XNEWVEC (tree
, ref
->operands
.length () - 1);
2493 while (*operand
< ref
->operands
.length ())
2495 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2501 folded
= build_call_array (currop
->type
,
2502 (TREE_CODE (fn
) == FUNCTION_DECL
2503 ? build_fold_addr_expr (fn
) : fn
),
2507 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2513 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2517 tree offset
= currop
->op0
;
2518 if (TREE_CODE (baseop
) == ADDR_EXPR
2519 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2523 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2526 offset
= int_const_binop (PLUS_EXPR
, offset
,
2527 build_int_cst (TREE_TYPE (offset
),
2529 baseop
= build_fold_addr_expr (base
);
2531 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2534 case TARGET_MEM_REF
:
2536 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2537 vn_reference_op_t nextop
= &ref
->operands
[++*operand
];
2538 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2544 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2550 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2554 return build5 (TARGET_MEM_REF
, currop
->type
,
2555 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2561 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2567 case VIEW_CONVERT_EXPR
:
2569 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2573 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2576 case WITH_SIZE_EXPR
:
2578 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2582 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2585 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2590 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2594 tree op1
= currop
->op0
;
2595 tree op2
= currop
->op1
;
2596 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
2599 /* For array ref vn_reference_op's, operand 1 of the array ref
2600 is op0 of the reference op and operand 3 of the array ref is
2602 case ARRAY_RANGE_REF
:
2606 tree genop1
= currop
->op0
;
2607 tree genop2
= currop
->op1
;
2608 tree genop3
= currop
->op2
;
2609 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2613 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2618 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2619 /* Drop zero minimum index if redundant. */
2620 if (integer_zerop (genop2
)
2622 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2626 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2633 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
2634 /* We can't always put a size in units of the element alignment
2635 here as the element alignment may be not visible. See
2636 PR43783. Simply drop the element size for constant
2638 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2642 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2643 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2644 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2649 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2656 tree genop2
= currop
->op1
;
2657 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
2660 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2664 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2668 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2673 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
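/* For illustration only (a made-up reference, not taken from the pass
   sources): to re-materialize a load like a.b[i_1].c the routine above
   rebuilds the whole access COMPONENT_REF <ARRAY_REF <COMPONENT_REF <a, b>,
   i_1>, c> as a single expression; only scalar operands such as the index
   i_1 are routed through find_or_generate_expression, the aggregate parts
   themselves never get SSA temporaries of their own.  */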
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
        return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (leader);

      /* Defer.  */
      return NULL_TREE;
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
         places.  We can insert NARYs which eventually re-materialize
         their operand values.  */
      if (temp->kind == NARY)
        return create_expression_by_pieces (block, temp, stmts,
                                            get_expr_type (expr));
    }

  /* Defer.  */
  return NULL_TREE;
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
2777 create_expression_by_pieces (basic_block block
, pre_expr expr
,
2778 gimple_seq
*stmts
, tree type
)
2782 gimple_seq forced_stmts
= NULL
;
2783 unsigned int value_id
;
2784 gimple_stmt_iterator gsi
;
2785 tree exprtype
= type
? type
: get_expr_type (expr
);
2791 /* We may hit the NAME/CONSTANT case if we have to convert types
2792 that value numbering saw through. */
2794 folded
= PRE_EXPR_NAME (expr
);
2797 folded
= PRE_EXPR_CONSTANT (expr
);
2801 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2802 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2809 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2810 tree
*genop
= XALLOCAVEC (tree
, nary
->length
);
2812 for (i
= 0; i
< nary
->length
; ++i
)
2814 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2817 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2818 may have conversions stripped. */
2819 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2822 genop
[i
] = gimple_convert (&forced_stmts
,
2823 nary
->type
, genop
[i
]);
2825 genop
[i
] = gimple_convert (&forced_stmts
,
2826 sizetype
, genop
[i
]);
2829 genop
[i
] = gimple_convert (&forced_stmts
,
2830 TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2832 if (nary
->opcode
== CONSTRUCTOR
)
2834 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
2835 for (i
= 0; i
< nary
->length
; ++i
)
2836 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2837 folded
= build_constructor (nary
->type
, elts
);
2841 switch (nary
->length
)
2844 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2848 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2849 genop
[0], genop
[1]);
2852 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2853 genop
[0], genop
[1], genop
[2]);
2865 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2866 folded
= fold_convert (exprtype
, folded
);
2868 /* Force the generated expression to be a sequence of GIMPLE
2870 We have to call unshare_expr because force_gimple_operand may
2871 modify the tree we pass to it. */
2872 gimple_seq tem
= NULL
;
2873 folded
= force_gimple_operand (unshare_expr (folded
), &tem
,
2875 gimple_seq_add_seq_without_update (&forced_stmts
, tem
);
2877 /* If we have any intermediate expressions to the value sets, add them
2878 to the value sets and chain them in the instruction stream. */
2881 gsi
= gsi_start (forced_stmts
);
2882 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2884 gimple stmt
= gsi_stmt (gsi
);
2885 tree forcedname
= gimple_get_lhs (stmt
);
2888 if (TREE_CODE (forcedname
) == SSA_NAME
)
2890 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2891 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2892 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2893 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2894 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2895 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2896 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2899 gimple_set_vuse (stmt
, BB_LIVE_VOP_ON_EXIT (block
));
2900 gimple_set_modified (stmt
, true);
2902 gimple_seq_add_seq (stmts
, forced_stmts
);
2905 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2906 newstmt
= gimple_build_assign (name
, folded
);
2907 gimple_set_vuse (newstmt
, BB_LIVE_VOP_ON_EXIT (block
));
2908 gimple_set_modified (newstmt
, true);
2909 gimple_set_plf (newstmt
, NECESSARY
, false);
2911 gimple_seq_add_stmt (stmts
, newstmt
);
2912 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2914 /* Fold the last statement. */
2915 gsi
= gsi_last (*stmts
);
2916 if (fold_stmt_inplace (&gsi
))
2917 update_stmt (gsi_stmt (gsi
));
2919 /* Add a value number to the temporary.
2920 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2921 we are creating the expression by pieces, and this particular piece of
2922 the expression may have been represented. There is no harm in replacing
2924 value_id
= get_expr_value_id (expr
);
2925 VN_INFO_GET (name
)->value_id
= value_id
;
2926 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2927 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2928 VN_INFO (name
)->valnum
= name
;
2929 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2930 nameexpr
= get_or_alloc_expr_for_name (name
);
2931 add_to_value (value_id
, nameexpr
);
2932 if (NEW_SETS (block
))
2933 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2934 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2936 pre_stats
.insertions
++;
2937 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2939 fprintf (dump_file
, "Inserted ");
2940 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
2941 fprintf (dump_file
, " in predecessor %d (%04d)\n",
2942 block
->index
, value_id
);
2949 /* Insert the to-be-made-available values of expression EXPRNUM for each
2950 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2951 merge the result with a phi node, given the same value number as
2952 NODE. Return true if we have inserted new stuff. */
2955 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
2956 vec
<pre_expr
> avail
)
2958 pre_expr expr
= expression_for_id (exprnum
);
2960 unsigned int val
= get_expr_value_id (expr
);
2962 bool insertions
= false;
2967 tree type
= get_expr_type (expr
);
2971 /* Make sure we aren't creating an induction variable. */
2972 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
2974 bool firstinsideloop
= false;
2975 bool secondinsideloop
= false;
2976 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2977 EDGE_PRED (block
, 0)->src
);
2978 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2979 EDGE_PRED (block
, 1)->src
);
2980 /* Induction variables only have one edge inside the loop. */
2981 if ((firstinsideloop
^ secondinsideloop
)
2982 && expr
->kind
!= REFERENCE
)
2984 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2985 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2990 /* Make the necessary insertions. */
2991 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2993 gimple_seq stmts
= NULL
;
2996 eprime
= avail
[pred
->dest_idx
];
2998 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3000 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3002 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3003 gsi_insert_seq_on_edge (pred
, stmts
);
3006 /* We cannot insert a PHI node if we failed to insert
3011 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (builtexpr
);
3014 else if (eprime
->kind
== CONSTANT
)
3016 /* Constants may not have the right type, fold_convert
3017 should give us back a constant with the right type. */
3018 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3019 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3021 tree builtexpr
= fold_convert (type
, constant
);
3022 if (!is_gimple_min_invariant (builtexpr
))
3024 tree forcedexpr
= force_gimple_operand (builtexpr
,
3027 if (!is_gimple_min_invariant (forcedexpr
))
3029 if (forcedexpr
!= builtexpr
)
3031 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3032 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3036 gimple_stmt_iterator gsi
;
3037 gsi
= gsi_start (stmts
);
3038 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3040 gimple stmt
= gsi_stmt (gsi
);
3041 tree lhs
= gimple_get_lhs (stmt
);
3042 if (TREE_CODE (lhs
) == SSA_NAME
)
3043 bitmap_set_bit (inserted_exprs
,
3044 SSA_NAME_VERSION (lhs
));
3045 gimple_set_plf (stmt
, NECESSARY
, false);
3047 gsi_insert_seq_on_edge (pred
, stmts
);
3049 avail
[pred
->dest_idx
]
3050 = get_or_alloc_expr_for_name (forcedexpr
);
3054 avail
[pred
->dest_idx
]
3055 = get_or_alloc_expr_for_constant (builtexpr
);
3058 else if (eprime
->kind
== NAME
)
3060 /* We may have to do a conversion because our value
3061 numbering can look through types in certain cases, but
3062 our IL requires all operands of a phi node have the same
3064 tree name
= PRE_EXPR_NAME (eprime
);
3065 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3069 builtexpr
= fold_convert (type
, name
);
3070 forcedexpr
= force_gimple_operand (builtexpr
,
3074 if (forcedexpr
!= name
)
3076 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3077 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3082 gimple_stmt_iterator gsi
;
3083 gsi
= gsi_start (stmts
);
3084 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3086 gimple stmt
= gsi_stmt (gsi
);
3087 tree lhs
= gimple_get_lhs (stmt
);
3088 if (TREE_CODE (lhs
) == SSA_NAME
)
3089 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3090 gimple_set_plf (stmt
, NECESSARY
, false);
3092 gsi_insert_seq_on_edge (pred
, stmts
);
3094 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (forcedexpr
);
3098 /* If we didn't want a phi node, and we made insertions, we still have
3099 inserted new stuff, and thus return true. If we didn't want a phi node,
3100 and didn't make insertions, we haven't added anything new, so return
3102 if (nophi
&& insertions
)
3104 else if (nophi
&& !insertions
)
3107 /* Now build a phi for the new variable. */
3108 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3109 phi
= create_phi_node (temp
, block
);
3111 gimple_set_plf (phi
, NECESSARY
, false);
3112 VN_INFO_GET (temp
)->value_id
= val
;
3113 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3114 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3115 VN_INFO (temp
)->valnum
= temp
;
3116 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3117 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3119 pre_expr ae
= avail
[pred
->dest_idx
];
3120 gcc_assert (get_expr_type (ae
) == type
3121 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3122 if (ae
->kind
== CONSTANT
)
3123 add_phi_arg (phi
, unshare_expr (PRE_EXPR_CONSTANT (ae
)),
3124 pred
, UNKNOWN_LOCATION
);
3126 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3129 newphi
= get_or_alloc_expr_for_name (temp
);
3130 add_to_value (val
, newphi
);
3132 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3133 this insertion, since we test for the existence of this value in PHI_GEN
3134 before proceeding with the partial redundancy checks in insert_aux.
3136 The value may exist in AVAIL_OUT, in particular, it could be represented
3137 by the expression we are trying to eliminate, in which case we want the
3138 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3141 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3142 this block, because if it did, it would have existed in our dominator's
3143 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3146 bitmap_insert_into_set (PHI_GEN (block
), newphi
);
3147 bitmap_value_replace_in_set (AVAIL_OUT (block
),
3149 bitmap_insert_into_set (NEW_SETS (block
),
3152 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3154 fprintf (dump_file
, "Created phi ");
3155 print_gimple_stmt (dump_file
, phi
, 0, 0);
3156 fprintf (dump_file
, " in block %d (%04d)\n", block
->index
, val
);
3164 /* Perform insertion of partially redundant values.
3165 For BLOCK, do the following:
3166 1. Propagate the NEW_SETS of the dominator into the current block.
3167 If the block has multiple predecessors,
3168 2a. Iterate over the ANTIC expressions for the block to see if
3169 any of them are partially redundant.
3170 2b. If so, insert them into the necessary predecessors to make
3171 the expression fully redundant.
3172 2c. Insert a new PHI merging the values of the predecessors.
3173 2d. Insert the new PHI, and the new expressions, into the
3175 3. Recursively call ourselves on the dominator children of BLOCK.
3177 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3178 do_regular_insertion and do_partial_insertion.
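/* For illustration only (made-up GIMPLE, not taken from the pass
   sources): the classic partially redundant case

     if (flag_5 != 0)
       t1_6 = x_1 + y_2;
     ...
     t2_7 = x_1 + y_2;

   is made fully redundant by inserting x_1 + y_2 on the edge that did
   not compute it and merging the two with a PHI:

     pretmp_8 = x_1 + y_2;            inserted on the non-computing edge
     prephitmp_9 = PHI <t1_6, pretmp_8>
     t2_7 = prephitmp_9;

   after which the elimination phase can remove the second computation.
   The "pretmp"/"prephitmp" names correspond to the temporaries created
   by create_expression_by_pieces and insert_into_preds_of_block.  */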
3183 do_regular_insertion (basic_block block
, basic_block dom
)
3185 bool new_stuff
= false;
3186 vec
<pre_expr
> exprs
;
3188 vec
<pre_expr
> avail
= vNULL
;
3191 exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
3192 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3194 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3196 if (expr
->kind
== NARY
3197 || expr
->kind
== REFERENCE
)
3200 bool by_some
= false;
3201 bool cant_insert
= false;
3202 bool all_same
= true;
3203 pre_expr first_s
= NULL
;
3206 pre_expr eprime
= NULL
;
3208 pre_expr edoubleprime
= NULL
;
3209 bool do_insertion
= false;
3211 val
= get_expr_value_id (expr
);
3212 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3214 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3216 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3218 fprintf (dump_file
, "Found fully redundant value: ");
3219 print_pre_expr (dump_file
, expr
);
3220 fprintf (dump_file
, "\n");
3225 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3227 unsigned int vprime
;
3229 /* We should never run insertion for the exit block
3230 and so not come across fake pred edges. */
3231 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3233 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3236 /* eprime will generally only be NULL if the
3237 value of the expression, translated
3238 through the PHI for this predecessor, is
3239 undefined. If that is the case, we can't
3240 make the expression fully redundant,
3241 because its value is undefined along a
3242 predecessor path. We can thus break out
3243 early because it doesn't matter what the
3244 rest of the results are. */
3247 avail
[pred
->dest_idx
] = NULL
;
3252 eprime
= fully_constant_expression (eprime
);
3253 vprime
= get_expr_value_id (eprime
);
3254 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3256 if (edoubleprime
== NULL
)
3258 avail
[pred
->dest_idx
] = eprime
;
3263 avail
[pred
->dest_idx
] = edoubleprime
;
3265 /* We want to perform insertions to remove a redundancy on
3266 a path in the CFG we want to optimize for speed. */
3267 if (optimize_edge_for_speed_p (pred
))
3268 do_insertion
= true;
3269 if (first_s
== NULL
)
3270 first_s
= edoubleprime
;
3271 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3275 /* If we can insert it, it's not the same value
3276 already existing along every predecessor, and
3277 it's defined by some predecessor, it is
3278 partially redundant. */
3279 if (!cant_insert
&& !all_same
&& by_some
)
3283 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3285 fprintf (dump_file
, "Skipping partial redundancy for "
3287 print_pre_expr (dump_file
, expr
);
3288 fprintf (dump_file
, " (%04d), no redundancy on to be "
3289 "optimized for speed edge\n", val
);
3292 else if (dbg_cnt (treepre_insert
))
3294 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3296 fprintf (dump_file
, "Found partial redundancy for "
3298 print_pre_expr (dump_file
, expr
);
3299 fprintf (dump_file
, " (%04d)\n",
3300 get_expr_value_id (expr
));
3302 if (insert_into_preds_of_block (block
,
3303 get_expression_id (expr
),
3308 /* If all edges produce the same value and that value is
3309 an invariant, then the PHI has the same value on all
3310 edges. Note this. */
3311 else if (!cant_insert
&& all_same
)
3313 gcc_assert (edoubleprime
->kind
== CONSTANT
3314 || edoubleprime
->kind
== NAME
);
3316 tree temp
= make_temp_ssa_name (get_expr_type (expr
),
3318 gimple assign
= gimple_build_assign (temp
,
3319 edoubleprime
->kind
== CONSTANT
? PRE_EXPR_CONSTANT (edoubleprime
) : PRE_EXPR_NAME (edoubleprime
));
3320 gimple_stmt_iterator gsi
= gsi_after_labels (block
);
3321 gsi_insert_before (&gsi
, assign
, GSI_NEW_STMT
);
3323 gimple_set_plf (assign
, NECESSARY
, false);
3324 VN_INFO_GET (temp
)->value_id
= val
;
3325 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3326 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3327 VN_INFO (temp
)->valnum
= temp
;
3328 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3329 pre_expr newe
= get_or_alloc_expr_for_name (temp
);
3330 add_to_value (val
, newe
);
3331 bitmap_value_replace_in_set (AVAIL_OUT (block
), newe
);
3332 bitmap_insert_into_set (NEW_SETS (block
), newe
);
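/* For illustration only (made-up GIMPLE): when every predecessor already
   provides the same invariant value, say the expression simplifies to the
   constant 4 along both incoming edges, do_regular_insertion above does
   not build a PHI at all; it simply emits an assignment like

     pretmp_10 = 4;

   at the start of BLOCK and records it in AVAIL_OUT and NEW_SETS so that
   later elimination can reuse it.  */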
3342 /* Perform insertion for partially anticipatable expressions. There
3343 is only one case we will perform insertion for these. This case is
3344 if the expression is partially anticipatable, and fully available.
3345 In this case, we know that putting it earlier will enable us to
3346 remove the later computation. */
3350 do_partial_partial_insertion (basic_block block
, basic_block dom
)
3352 bool new_stuff
= false;
3353 vec
<pre_expr
> exprs
;
3355 auto_vec
<pre_expr
> avail
;
3358 exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
3359 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3361 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3363 if (expr
->kind
== NARY
3364 || expr
->kind
== REFERENCE
)
3368 bool cant_insert
= false;
3371 pre_expr eprime
= NULL
;
3374 val
= get_expr_value_id (expr
);
3375 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3377 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3380 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3382 unsigned int vprime
;
3383 pre_expr edoubleprime
;
3385 /* We should never run insertion for the exit block
3386 and so not come across fake pred edges. */
3387 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3389 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3393 /* eprime will generally only be NULL if the
3394 value of the expression, translated
3395 through the PHI for this predecessor, is
3396 undefined. If that is the case, we can't
3397 make the expression fully redundant,
3398 because its value is undefined along a
3399 predecessor path. We can thus break out
3400 early because it doesn't matter what the
3401 rest of the results are. */
3404 avail
[pred
->dest_idx
] = NULL
;
3409 eprime
= fully_constant_expression (eprime
);
3410 vprime
= get_expr_value_id (eprime
);
3411 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3412 avail
[pred
->dest_idx
] = edoubleprime
;
3413 if (edoubleprime
== NULL
)
3420 /* If we can insert it, it's not the same value
3421 already existing along every predecessor, and
3422 it's defined by some predecessor, it is
3423 partially redundant. */
3424 if (!cant_insert
&& by_all
)
3427 bool do_insertion
= false;
3429 /* Insert only if we can remove a later expression on a path
3430 that we want to optimize for speed.
3431 The phi node that we will be inserting in BLOCK is not free,
3432 and inserting it for the sake of !optimize_for_speed successor
3433 may cause regressions on the speed path. */
3434 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3436 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
)
3437 || bitmap_set_contains_value (ANTIC_IN (succ
->dest
), val
))
3439 if (optimize_edge_for_speed_p (succ
))
3440 do_insertion
= true;
3446 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3448 fprintf (dump_file
, "Skipping partial partial redundancy "
3450 print_pre_expr (dump_file
, expr
);
3451 fprintf (dump_file
, " (%04d), not (partially) anticipated "
3452 "on any to be optimized for speed edges\n", val
);
3455 else if (dbg_cnt (treepre_insert
))
3457 pre_stats
.pa_insert
++;
3458 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3460 fprintf (dump_file
, "Found partial partial redundancy "
3462 print_pre_expr (dump_file
, expr
);
3463 fprintf (dump_file
, " (%04d)\n",
3464 get_expr_value_id (expr
));
3466 if (insert_into_preds_of_block (block
,
3467 get_expression_id (expr
),
3480 insert_aux (basic_block block
)
3483 bool new_stuff
= false;
3488 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3493 bitmap_set_t newset
= NEW_SETS (dom
);
3496 /* Note that we need to value_replace both NEW_SETS, and
3497 AVAIL_OUT. For both the case of NEW_SETS, the value may be
3498 represented by some non-simple expression here that we want
3499 to replace it with. */
3500 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3502 pre_expr expr
= expression_for_id (i
);
3503 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3504 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3507 if (!single_pred_p (block
))
3509 new_stuff
|= do_regular_insertion (block
, dom
);
3510 if (do_partial_partial
)
3511 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3515 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3517 son
= next_dom_son (CDI_DOMINATORS
, son
))
3519 new_stuff
|= insert_aux (son
);
3525 /* Perform insertion of partially redundant values. */
3530 bool new_stuff
= true;
3532 int num_iterations
= 0;
3534 FOR_ALL_BB_FN (bb
, cfun
)
3535 NEW_SETS (bb
) = bitmap_set_new ();
3540 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3541 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3542 new_stuff
= insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3544 /* Clear the NEW sets before the next iteration. We have already
3545 fully propagated its contents. */
3547 FOR_ALL_BB_FN (bb
, cfun
)
3548 bitmap_set_free (NEW_SETS (bb
));
3550 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
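/* For illustration only (made-up GIMPLE): compute_avail below only ever
   grows availability down the dominator tree, e.g. with

     bb2: a_3 = x_1 + y_2;
     bb3: b_4 = x_1 + y_2;     where bb2 dominates bb3

   AVAIL_OUT (bb3) starts as a copy of AVAIL_OUT (bb2), so by the time bb3
   is processed the leader a_3 for the value of x_1 + y_2 is already
   available and elimination can later rewrite the second statement to
   reuse it.  */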
3554 /* Compute the AVAIL set for all basic blocks.
3556 This function performs value numbering of the statements in each basic
3557 block. The AVAIL sets are built from information we glean while doing
3558 this value numbering, since the AVAIL sets contain only one entry per
3561 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3562 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3565 compute_avail (void)
3568 basic_block block
, son
;
3569 basic_block
*worklist
;
3573 /* We pretend that default definitions are defined in the entry block.
3574 This includes function arguments and the static chain decl. */
3575 for (i
= 1; i
< num_ssa_names
; ++i
)
3577 tree name
= ssa_name (i
);
3580 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3581 || has_zero_uses (name
)
3582 || virtual_operand_p (name
))
3585 e
= get_or_alloc_expr_for_name (name
);
3586 add_to_value (get_expr_value_id (e
), e
);
3587 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), e
);
3588 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3592 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3594 print_bitmap_set (dump_file
, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3595 "tmp_gen", ENTRY_BLOCK
);
3596 print_bitmap_set (dump_file
, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3597 "avail_out", ENTRY_BLOCK
);
3600 /* Allocate the worklist. */
3601 worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
));
3603 /* Seed the algorithm by putting the dominator children of the entry
3604 block on the worklist. */
3605 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3607 son
= next_dom_son (CDI_DOMINATORS
, son
))
3608 worklist
[sp
++] = son
;
3610 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3611 = ssa_default_def (cfun
, gimple_vop (cfun
));
3613 /* Loop until the worklist is empty. */
3616 gimple_stmt_iterator gsi
;
3620 /* Pick a block from the worklist. */
3621 block
= worklist
[--sp
];
3623 /* Initially, the set of available values in BLOCK is that of
3624 its immediate dominator. */
3625 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3628 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3629 BB_LIVE_VOP_ON_EXIT (block
) = BB_LIVE_VOP_ON_EXIT (dom
);
3632 /* Generate values for PHI nodes. */
3633 for (gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3635 tree result
= gimple_phi_result (gsi_stmt (gsi
));
3637 /* We have no need for virtual phis, as they don't represent
3638 actual computations. */
3639 if (virtual_operand_p (result
))
3641 BB_LIVE_VOP_ON_EXIT (block
) = result
;
3645 pre_expr e
= get_or_alloc_expr_for_name (result
);
3646 add_to_value (get_expr_value_id (e
), e
);
3647 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3648 bitmap_insert_into_set (PHI_GEN (block
), e
);
3651 BB_MAY_NOTRETURN (block
) = 0;
3653 /* Now compute value numbers and populate value sets with all
3654 the expressions computed in BLOCK. */
3655 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3660 stmt
= gsi_stmt (gsi
);
3662 /* Cache whether the basic-block has any non-visible side-effect
3664 If this isn't a call or it is the last stmt in the
3665 basic-block then the CFG represents things correctly. */
3666 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
3668 /* Non-looping const functions always return normally.
3669 Otherwise the call might not return or have side-effects
3670 that forbids hoisting possibly trapping expressions
3672 int flags
= gimple_call_flags (stmt
);
3673 if (!(flags
& ECF_CONST
)
3674 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3675 BB_MAY_NOTRETURN (block
) = 1;
3678 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3680 pre_expr e
= get_or_alloc_expr_for_name (op
);
3682 add_to_value (get_expr_value_id (e
), e
);
3683 bitmap_insert_into_set (TMP_GEN (block
), e
);
3684 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3687 if (gimple_vdef (stmt
))
3688 BB_LIVE_VOP_ON_EXIT (block
) = gimple_vdef (stmt
);
3690 if (gimple_has_side_effects (stmt
)
3691 || stmt_could_throw_p (stmt
)
3692 || is_gimple_debug (stmt
))
3695 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3697 if (ssa_undefined_value_p (op
))
3699 pre_expr e
= get_or_alloc_expr_for_name (op
);
3700 bitmap_value_insert_into_set (EXP_GEN (block
), e
);
3703 switch (gimple_code (stmt
))
3711 vn_reference_s ref1
;
3712 pre_expr result
= NULL
;
3714 /* We can value number only calls to real functions. */
3715 if (gimple_call_internal_p (stmt
))
3718 vn_reference_lookup_call (stmt
, &ref
, &ref1
);
3722 /* If the value of the call is not invalidated in
3723 this block until it is computed, add the expression
3725 if (!gimple_vuse (stmt
)
3727 (SSA_NAME_DEF_STMT (gimple_vuse (stmt
))) == GIMPLE_PHI
3728 || gimple_bb (SSA_NAME_DEF_STMT
3729 (gimple_vuse (stmt
))) != block
)
3731 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3732 result
->kind
= REFERENCE
;
3734 PRE_EXPR_REFERENCE (result
) = ref
;
3736 get_or_alloc_expression_id (result
);
3737 add_to_value (get_expr_value_id (result
), result
);
3738 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3745 pre_expr result
= NULL
;
3746 switch (vn_get_stmt_kind (stmt
))
3750 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3753 /* COND_EXPR and VEC_COND_EXPR are awkward in
3754 that they contain an embedded complex expression.
3755 Don't even try to shove those through PRE. */
3756 if (code
== COND_EXPR
3757 || code
== VEC_COND_EXPR
)
3760 vn_nary_op_lookup_stmt (stmt
, &nary
);
3764 /* If the NARY traps and there was a preceding
3765 point in the block that might not return avoid
3766 adding the nary to EXP_GEN. */
3767 if (BB_MAY_NOTRETURN (block
)
3768 && vn_nary_may_trap (nary
))
3771 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3772 result
->kind
= NARY
;
3774 PRE_EXPR_NARY (result
) = nary
;
3781 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
3787 /* If the value of the reference is not invalidated in
3788 this block until it is computed, add the expression
3790 if (gimple_vuse (stmt
))
3794 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3795 while (!gimple_nop_p (def_stmt
)
3796 && gimple_code (def_stmt
) != GIMPLE_PHI
3797 && gimple_bb (def_stmt
) == block
)
3799 if (stmt_may_clobber_ref_p
3800 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3806 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3812 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3813 result
->kind
= REFERENCE
;
3815 PRE_EXPR_REFERENCE (result
) = ref
;
3823 get_or_alloc_expression_id (result
);
3824 add_to_value (get_expr_value_id (result
), result
);
3825 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3833 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3835 print_bitmap_set (dump_file
, EXP_GEN (block
),
3836 "exp_gen", block
->index
);
3837 print_bitmap_set (dump_file
, PHI_GEN (block
),
3838 "phi_gen", block
->index
);
3839 print_bitmap_set (dump_file
, TMP_GEN (block
),
3840 "tmp_gen", block
->index
);
3841 print_bitmap_set (dump_file
, AVAIL_OUT (block
),
3842 "avail_out", block
->index
);
3845 /* Put the dominator children of BLOCK on the worklist of blocks
3846 to compute available sets for. */
3847 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3849 son
= next_dom_son (CDI_DOMINATORS
, son
))
3850 worklist
[sp
++] = son
;
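/* For illustration only (made-up GIMPLE): a statement

     a_3 = x_1 + y_2;

   makes compute_avail above put the name a_3 into TMP_GEN and AVAIL_OUT of
   its block and the value-expression x_1 + y_2 into EXP_GEN; the ANTIC
   computation works on the EXP_GEN expressions while elimination works on
   the SSA leaders recorded in AVAIL_OUT.  */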
/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
        return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
        el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
      el_avail_stack.safe_push (op);
    }
}
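/* For illustration only (hypothetical SSA versions): if value numbering
   says _7 and _3 share a value whose representative valnum is _3, then
   after eliminate_push_avail (_3) we have
   el_avail[SSA_NAME_VERSION (_3)] == _3, and a later eliminate_avail (_7)
   looks up that same slot and returns _3 as the replacement to use
   for _7.  */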
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gimple tem = gimple_build_assign (res,
                                    fold_build1 (TREE_CODE (expr),
                                                 TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction direction, bool do_pre_)
    : dom_walker (direction), do_pre (do_pre_) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  bool do_pre;
};
3945 /* Perform elimination for the basic-block B during the domwalk. */
3948 eliminate_dom_walker::before_dom_children (basic_block b
)
3950 gimple_stmt_iterator gsi
;
3954 el_avail_stack
.safe_push (NULL_TREE
);
3956 /* ??? If we do nothing for unreachable blocks then this will confuse
3957 tailmerging. Eventually we can reduce its reliance on SCCVN now
3958 that we fully copy/constant-propagate (most) things. */
3960 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
3962 gimple phi
= gsi_stmt (gsi
);
3963 tree res
= PHI_RESULT (phi
);
3965 if (virtual_operand_p (res
))
3971 tree sprime
= eliminate_avail (res
);
3975 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3977 fprintf (dump_file
, "Replaced redundant PHI node defining ");
3978 print_generic_expr (dump_file
, res
, 0);
3979 fprintf (dump_file
, " with ");
3980 print_generic_expr (dump_file
, sprime
, 0);
3981 fprintf (dump_file
, "\n");
3984 /* If we inserted this PHI node ourself, it's not an elimination. */
3986 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
3989 pre_stats
.eliminations
++;
3991 /* If we will propagate into all uses don't bother to do
3993 if (may_propagate_copy (res
, sprime
))
3995 /* Mark the PHI for removal. */
3996 el_to_remove
.safe_push (phi
);
4001 remove_phi_node (&gsi
, false);
4004 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4005 && TREE_CODE (sprime
) == SSA_NAME
)
4006 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4008 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4009 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4010 gimple stmt
= gimple_build_assign (res
, sprime
);
4011 /* ??? It cannot yet be necessary (DOM walk). */
4012 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4014 gimple_stmt_iterator gsi2
= gsi_after_labels (b
);
4015 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4019 eliminate_push_avail (res
);
4023 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4025 tree sprime
= NULL_TREE
;
4026 stmt
= gsi_stmt (gsi
);
4027 tree lhs
= gimple_get_lhs (stmt
);
4028 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
4029 && !gimple_has_volatile_ops (stmt
)
4030 /* See PR43491. Do not replace a global register variable when
4031 it is a the RHS of an assignment. Do replace local register
4032 variables since gcc does not guarantee a local variable will
4033 be allocated in register.
4034 ??? The fix isn't effective here. This should instead
4035 be ensured by not value-numbering them the same but treating
4036 them like volatiles? */
4037 && !(gimple_assign_single_p (stmt
)
4038 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == VAR_DECL
4039 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
))
4040 && is_global_var (gimple_assign_rhs1 (stmt
)))))
4042 sprime
= eliminate_avail (lhs
);
4045 /* If there is no existing usable leader but SCCVN thinks
4046 it has an expression it wants to use as replacement,
4048 tree val
= VN_INFO (lhs
)->valnum
;
4050 && TREE_CODE (val
) == SSA_NAME
4051 && VN_INFO (val
)->needs_insertion
4052 && VN_INFO (val
)->expr
!= NULL_TREE
4053 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4054 eliminate_push_avail (sprime
);
4057 /* If this now constitutes a copy duplicate points-to
4058 and range info appropriately. This is especially
4059 important for inserted code. See tree-ssa-copy.c
4060 for similar code. */
4062 && TREE_CODE (sprime
) == SSA_NAME
)
4064 basic_block sprime_b
= gimple_bb (SSA_NAME_DEF_STMT (sprime
));
4065 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
4066 && SSA_NAME_PTR_INFO (lhs
)
4067 && !SSA_NAME_PTR_INFO (sprime
))
4069 duplicate_ssa_name_ptr_info (sprime
,
4070 SSA_NAME_PTR_INFO (lhs
));
4072 mark_ptr_info_alignment_unknown
4073 (SSA_NAME_PTR_INFO (sprime
));
4075 else if (!POINTER_TYPE_P (TREE_TYPE (lhs
))
4076 && SSA_NAME_RANGE_INFO (lhs
)
4077 && !SSA_NAME_RANGE_INFO (sprime
)
4079 duplicate_ssa_name_range_info (sprime
,
4080 SSA_NAME_RANGE_TYPE (lhs
),
4081 SSA_NAME_RANGE_INFO (lhs
));
4084 /* Inhibit the use of an inserted PHI on a loop header when
4085 the address of the memory reference is a simple induction
4086 variable. In other cases the vectorizer won't do anything
4087 anyway (either it's loop invariant or a complicated
4090 && TREE_CODE (sprime
) == SSA_NAME
4092 && flag_tree_loop_vectorize
4093 && loop_outer (b
->loop_father
)
4094 && has_zero_uses (sprime
)
4095 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))
4096 && gimple_assign_load_p (stmt
))
4098 gimple def_stmt
= SSA_NAME_DEF_STMT (sprime
);
4099 basic_block def_bb
= gimple_bb (def_stmt
);
4100 if (gimple_code (def_stmt
) == GIMPLE_PHI
4101 && b
->loop_father
->header
== def_bb
)
4106 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
4109 def_bb
= gimple_bb (SSA_NAME_DEF_STMT (op
));
4111 && flow_bb_inside_loop_p (b
->loop_father
, def_bb
)
4112 && simple_iv (b
->loop_father
,
4113 b
->loop_father
, op
, &iv
, true))
4121 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4123 fprintf (dump_file
, "Not replacing ");
4124 print_gimple_expr (dump_file
, stmt
, 0, 0);
4125 fprintf (dump_file
, " with ");
4126 print_generic_expr (dump_file
, sprime
, 0);
4127 fprintf (dump_file
, " which would add a loop"
4128 " carried dependence to loop %d\n",
4129 b
->loop_father
->num
);
4131 /* Don't keep sprime available. */
4139 /* If we can propagate the value computed for LHS into
4140 all uses don't bother doing anything with this stmt. */
4141 if (may_propagate_copy (lhs
, sprime
))
4143 /* Mark it for removal. */
4144 el_to_remove
.safe_push (stmt
);
4146 /* ??? Don't count copy/constant propagations. */
4147 if (gimple_assign_single_p (stmt
)
4148 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4149 || gimple_assign_rhs1 (stmt
) == sprime
))
4152 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4154 fprintf (dump_file
, "Replaced ");
4155 print_gimple_expr (dump_file
, stmt
, 0, 0);
4156 fprintf (dump_file
, " with ");
4157 print_generic_expr (dump_file
, sprime
, 0);
4158 fprintf (dump_file
, " in all uses of ");
4159 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4162 pre_stats
.eliminations
++;
4166 /* If this is an assignment from our leader (which
4167 happens in the case the value-number is a constant)
4168 then there is nothing to do. */
4169 if (gimple_assign_single_p (stmt
)
4170 && sprime
== gimple_assign_rhs1 (stmt
))
4173 /* Else replace its RHS. */
4174 bool can_make_abnormal_goto
4175 = is_gimple_call (stmt
)
4176 && stmt_can_make_abnormal_goto (stmt
);
4178 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4180 fprintf (dump_file
, "Replaced ");
4181 print_gimple_expr (dump_file
, stmt
, 0, 0);
4182 fprintf (dump_file
, " with ");
4183 print_generic_expr (dump_file
, sprime
, 0);
4184 fprintf (dump_file
, " in ");
4185 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4188 if (TREE_CODE (sprime
) == SSA_NAME
)
4189 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4192 pre_stats
.eliminations
++;
4193 gimple orig_stmt
= stmt
;
4194 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4195 TREE_TYPE (sprime
)))
4196 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4197 tree vdef
= gimple_vdef (stmt
);
4198 tree vuse
= gimple_vuse (stmt
);
4199 propagate_tree_value_into_stmt (&gsi
, sprime
);
4200 stmt
= gsi_stmt (gsi
);
4202 if (vdef
!= gimple_vdef (stmt
))
4203 VN_INFO (vdef
)->valnum
= vuse
;
4205 /* If we removed EH side-effects from the statement, clean
4206 its EH information. */
4207 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4209 bitmap_set_bit (need_eh_cleanup
,
4210 gimple_bb (stmt
)->index
);
4211 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4212 fprintf (dump_file
, " Removed EH side-effects.\n");
4215 /* Likewise for AB side-effects. */
4216 if (can_make_abnormal_goto
4217 && !stmt_can_make_abnormal_goto (stmt
))
4219 bitmap_set_bit (need_ab_cleanup
,
4220 gimple_bb (stmt
)->index
);
4221 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4222 fprintf (dump_file
, " Removed AB side-effects.\n");
4229 /* If the statement is a scalar store, see if the expression
4230 has the same value number as its rhs. If so, the store is
4232 if (gimple_assign_single_p (stmt
)
4233 && !gimple_has_volatile_ops (stmt
)
4234 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4235 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4236 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))))
4239 tree rhs
= gimple_assign_rhs1 (stmt
);
4240 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4241 gimple_vuse (stmt
), VN_WALK
, NULL
);
4242 if (TREE_CODE (rhs
) == SSA_NAME
)
4243 rhs
= VN_INFO (rhs
)->valnum
;
4245 && operand_equal_p (val
, rhs
, 0))
4247 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4249 fprintf (dump_file
, "Deleted redundant store ");
4250 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4253 /* Queue stmt for removal. */
4254 el_to_remove
.safe_push (stmt
);
4259 bool can_make_abnormal_goto
= stmt_can_make_abnormal_goto (stmt
);
4260 bool was_noreturn
= (is_gimple_call (stmt
)
4261 && gimple_call_noreturn_p (stmt
));
4262 tree vdef
= gimple_vdef (stmt
);
4263 tree vuse
= gimple_vuse (stmt
);
4265 /* If we didn't replace the whole stmt (or propagate the result
4266 into all uses), replace all uses on this stmt with their
4268 use_operand_p use_p
;
4270 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4272 tree use
= USE_FROM_PTR (use_p
);
4273 /* ??? The call code above leaves stmt operands un-updated. */
4274 if (TREE_CODE (use
) != SSA_NAME
)
4276 tree sprime
= eliminate_avail (use
);
4277 if (sprime
&& sprime
!= use
4278 && may_propagate_copy (use
, sprime
)
4279 /* We substitute into debug stmts to avoid excessive
4280 debug temporaries created by removed stmts, but we need
4281 to avoid doing so for inserted sprimes as we never want
4282 to create debug temporaries for them. */
4284 || TREE_CODE (sprime
) != SSA_NAME
4285 || !is_gimple_debug (stmt
)
4286 || !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))))
4288 propagate_value (use_p
, sprime
);
4289 gimple_set_modified (stmt
, true);
4290 if (TREE_CODE (sprime
) == SSA_NAME
4291 && !is_gimple_debug (stmt
))
4292 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4297 /* Visit indirect calls and turn them into direct calls if
4298 possible using the devirtualization machinery. */
4299 if (is_gimple_call (stmt
))
4301 tree fn
= gimple_call_fn (stmt
);
4303 && flag_devirtualize
4304 && virtual_method_call_p (fn
))
4306 tree otr_type
= obj_type_ref_class (fn
);
4308 ipa_polymorphic_call_context
context (current_function_decl
, fn
, stmt
, &instance
);
4311 context
.get_dynamic_type (instance
, OBJ_TYPE_REF_OBJECT (fn
), otr_type
, stmt
);
4313 vec
<cgraph_node
*>targets
4314 = possible_polymorphic_call_targets (obj_type_ref_class (fn
),
4316 (OBJ_TYPE_REF_TOKEN (fn
)),
4319 if (dump_enabled_p ())
4320 dump_possible_polymorphic_call_targets (dump_file
,
4321 obj_type_ref_class (fn
),
4323 (OBJ_TYPE_REF_TOKEN (fn
)),
4325 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4328 if (targets
.length () == 1)
4329 fn
= targets
[0]->decl
;
4331 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4332 if (dump_enabled_p ())
4334 location_t loc
= gimple_location_safe (stmt
);
4335 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
4336 "converting indirect call to "
4338 cgraph_node::get (fn
)->name ());
4340 gimple_call_set_fndecl (stmt
, fn
);
4341 gimple_set_modified (stmt
, true);
4346 if (gimple_modified_p (stmt
))
4348 /* If a formerly non-invariant ADDR_EXPR is turned into an
4349 invariant one it was on a separate stmt. */
4350 if (gimple_assign_single_p (stmt
)
4351 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
4352 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
4353 gimple old_stmt
= stmt
;
4354 if (is_gimple_call (stmt
))
4356 /* ??? Only fold calls inplace for now, this may create new
4357 SSA names which in turn will confuse free_scc_vn SSA name
4359 fold_stmt_inplace (&gsi
);
4360 /* When changing a call into a noreturn call, cfg cleanup
4361 is needed to fix up the noreturn call. */
4362 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4363 el_todo
|= TODO_cleanup_cfg
;
4368 stmt
= gsi_stmt (gsi
);
4369 if ((gimple_code (stmt
) == GIMPLE_COND
4370 && (gimple_cond_true_p (stmt
)
4371 || gimple_cond_false_p (stmt
)))
4372 || (gimple_code (stmt
) == GIMPLE_SWITCH
4373 && TREE_CODE (gimple_switch_index (stmt
)) == INTEGER_CST
))
4374 el_todo
|= TODO_cleanup_cfg
;
4376 /* If we removed EH side-effects from the statement, clean
4377 its EH information. */
4378 if (maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
))
4380 bitmap_set_bit (need_eh_cleanup
,
4381 gimple_bb (stmt
)->index
);
4382 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4383 fprintf (dump_file
, " Removed EH side-effects.\n");
4385 /* Likewise for AB side-effects. */
4386 if (can_make_abnormal_goto
4387 && !stmt_can_make_abnormal_goto (stmt
))
4389 bitmap_set_bit (need_ab_cleanup
,
4390 gimple_bb (stmt
)->index
);
4391 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4392 fprintf (dump_file
, " Removed AB side-effects.\n");
4395 if (vdef
!= gimple_vdef (stmt
))
4396 VN_INFO (vdef
)->valnum
= vuse
;
4399 /* Make new values available - for fully redundant LHS we
4400 continue with the next stmt above and skip this. */
4402 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_DEF
)
4403 eliminate_push_avail (DEF_FROM_PTR (defp
));
4406 /* Replace destination PHI arguments. */
4409 FOR_EACH_EDGE (e
, ei
, b
->succs
)
4411 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4413 gimple phi
= gsi_stmt (gsi
);
4414 use_operand_p use_p
= PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
);
4415 tree arg
= USE_FROM_PTR (use_p
);
4416 if (TREE_CODE (arg
) != SSA_NAME
4417 || virtual_operand_p (arg
))
4419 tree sprime
= eliminate_avail (arg
);
4420 if (sprime
&& may_propagate_copy (arg
, sprime
))
4422 propagate_value (use_p
, sprime
);
4423 if (TREE_CODE (sprime
) == SSA_NAME
)
4424 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    el_avail[SSA_NAME_VERSION (VN_INFO (entry)->valnum)] = NULL_TREE;
}
;
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
                        do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();
  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
        lhs = gimple_phi_result (stmt);
      else
        lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
          && TREE_CODE (lhs) == SSA_NAME)
        bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();

  return el_todo;
}
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
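/* Purging dead EH or abnormal-call edges can leave unreachable blocks and
   forwarders behind, which is why TODO_cleanup_cfg is returned whenever
   either cleanup actually ran.  */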
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
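/* NECESSARY is a pass-local statement flag (plf): insertions start out
   unmarked, elimination and the walk below set the flag on every statement
   that is still reached from a real use, and whatever remains unmarked
   afterwards is deleted as a dead insertion.  */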
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */
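/* For instance, an expression inserted into a predecessor block to make a
   partially redundant computation fully redundant can itself end up unused
   when the uses it anticipated are later satisfied by a different leader;
   such leftovers are what this walk deletes.  */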
static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }

  BITMAP_FREE (worklist);
}
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
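/* The PRE driver: value number the function with SCCVN, compute the
   AVAIL and ANTIC sets, insert expressions to make partial redundancies
   full, eliminate the now fully redundant computations, and finally
   remove any insertions that turned out to be unnecessary.  */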
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Eliminate folds statements which might (should not...) end up
     not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should instead be handled by
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
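/* FRE reuses only the elimination half of PRE: it runs SCCVN in rewriting
   mode and removes computations that are already fully available, without
   computing ANTIC sets or inserting any new expressions.  */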
unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}