/* Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
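/* As a small illustration (example user code, not taken from GCC
   itself), the kind of partial redundancy the steps above remove
   looks like this:

     int
     example (int a, int b, int flag)
     {
       int x = 0;
       if (flag)
         x = a + b;          <- a + b is AVAIL only on this path
       return x + (a + b);   <- partially redundant: ANTIC in both
                                predecessors, AVAIL in only one
     }

   Insertion adds a computation of a + b on the path where it was
   missing, and elimination then replaces the second a + b with the
   now fully available value (merged by a PHI node).  */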
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value number.  */
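/* For example (hypothetical GIMPLE, made up for illustration):

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   Both statements compute the same value, so x_1 and y_4 share one
   value_id; either SSA_NAME can act as the representative for that
   value, and no fake SSA_NAME has to be created for it.  */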
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps: one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
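/* A minimal sketch of the idea (simplified, assumed field layout; the
   real structure used by this file is struct bitmap_set below):

     struct toy_set
     {
       bitmap expressions;   <- one bit per expression id in the set
       bitmap values;        <- one bit per value id in the set
     };

   Membership tests for values and for individual expressions are then
   both single bit tests.  */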
/* Type of expression, used to know which member of the PRE_EXPR union
   to use.  */

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
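/* Example use of the accessors (this mirrors get_or_alloc_expr_for_name
   further below and is shown only for illustration):

     struct pre_expr_d expr;

     expr.kind = NAME;
     expr.id = 0;
     PRE_EXPR_NAME (&expr) = name;
     result_id = lookup_expression_id (&expr);  */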
/* Compare two expressions pointed to by P1 and P2 for equality.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash the expression pointed to by P1.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;

  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the existing expression id for EXPR, or 0 if it has not
   been assigned one yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}
static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

static bool in_fre = false;
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
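/* Typical iteration over a set, as used throughout this file
   (illustrative only):

     unsigned int i;
     bitmap_iterator bi;

     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ...
       }
*/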
/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)  ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)     ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)  ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;
} pre_stats;
static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */

static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */

static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}
/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}
/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}
/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (set->values, val);
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (set->values, val);
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}
/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}
/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_and_into (dest->values, orig->values);
  bitmap_copy (temp, dest->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      if (!bitmap_bit_p (dest->values, value_id))
        bitmap_clear_bit (dest->expressions, i);
    }
  BITMAP_FREE (temp);
}
/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
                    orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (result->values, value_id);
    }

  return result;
}
/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, value_id);
}
/* Return true if bitmapped set SET contains the expression EXPR.  */

static bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
        {
          bitmap_clear_bit (set->expressions, i);
          bitmap_set_bit (set->expressions, get_expression_id (expr));
          return;
        }
    }
}
/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}
/* Insert EXPR into SET if EXPR's value is not already present in
   the set.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (set->values, val))
    bitmap_set_bit (set->expressions, expr->id);
}
908 /* Print out EXPR to outfile. */
911 print_pre_expr (FILE *outfile
, const pre_expr expr
)
916 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
919 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
924 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
925 fprintf (outfile
, "{%s,", tree_code_name
[nary
->opcode
]);
926 for (i
= 0; i
< nary
->length
; i
++)
928 print_generic_expr (outfile
, nary
->op
[i
], 0);
929 if (i
!= (unsigned) nary
->length
- 1)
930 fprintf (outfile
, ",");
932 fprintf (outfile
, "}");
938 vn_reference_op_t vro
;
940 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
941 fprintf (outfile
, "{");
943 VEC_iterate (vn_reference_op_s
, ref
->operands
, i
, vro
);
946 bool closebrace
= false;
947 if (vro
->opcode
!= SSA_NAME
948 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
950 fprintf (outfile
, "%s", tree_code_name
[vro
->opcode
]);
953 fprintf (outfile
, "<");
959 print_generic_expr (outfile
, vro
->op0
, 0);
962 fprintf (outfile
, ",");
963 print_generic_expr (outfile
, vro
->op1
, 0);
967 fprintf (outfile
, ",");
968 print_generic_expr (outfile
, vro
->op2
, 0);
972 fprintf (outfile
, ">");
973 if (i
!= VEC_length (vn_reference_op_s
, ref
->operands
) - 1)
974 fprintf (outfile
, ",");
976 fprintf (outfile
, "}");
979 fprintf (outfile
, "@");
980 print_generic_expr (outfile
, ref
->vuse
, 0);
986 void debug_pre_expr (pre_expr
);
988 /* Like print_pre_expr but always prints to stderr. */
990 debug_pre_expr (pre_expr e
)
992 print_pre_expr (stderr
, e
);
993 fprintf (stderr
, "\n");
996 /* Print out SET to OUTFILE. */
999 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
1000 const char *setname
, int blockindex
)
1002 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1009 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1011 const pre_expr expr
= expression_for_id (i
);
1014 fprintf (outfile
, ", ");
1016 print_pre_expr (outfile
, expr
);
1018 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1021 fprintf (outfile
, " }\n");
1024 void debug_bitmap_set (bitmap_set_t
);
1027 debug_bitmap_set (bitmap_set_t set
)
1029 print_bitmap_set (stderr
, set
, "debug", 0);
1032 /* Print out the expressions that have VAL to OUTFILE. */
1035 print_value_expressions (FILE *outfile
, unsigned int val
)
1037 bitmap_set_t set
= VEC_index (bitmap_set_t
, value_expressions
, val
);
1041 sprintf (s
, "%04d", val
);
1042 print_bitmap_set (outfile
, set
, s
, 0);
1048 debug_value_expressions (unsigned int val
)
1050 print_value_expressions (stderr
, val
);
1053 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1057 get_or_alloc_expr_for_constant (tree constant
)
1059 unsigned int result_id
;
1060 unsigned int value_id
;
1061 struct pre_expr_d expr
;
1064 expr
.kind
= CONSTANT
;
1065 PRE_EXPR_CONSTANT (&expr
) = constant
;
1066 result_id
= lookup_expression_id (&expr
);
1068 return expression_for_id (result_id
);
1070 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1071 newexpr
->kind
= CONSTANT
;
1072 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1073 alloc_expression_id (newexpr
);
1074 value_id
= get_or_alloc_constant_value_id (constant
);
1075 add_to_value (value_id
, newexpr
);
1079 /* Given a value id V, find the actual tree representing the constant
1080 value if there is one, and return it. Return NULL if we can't find
1084 get_constant_for_value_id (unsigned int v
)
1086 if (value_id_constant_p (v
))
1090 bitmap_set_t exprset
= VEC_index (bitmap_set_t
, value_expressions
, v
);
1092 FOR_EACH_EXPR_ID_IN_SET (exprset
, i
, bi
)
1094 pre_expr expr
= expression_for_id (i
);
1095 if (expr
->kind
== CONSTANT
)
1096 return PRE_EXPR_CONSTANT (expr
);
1102 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1103 Currently only supports constants and SSA_NAMES. */
1105 get_or_alloc_expr_for (tree t
)
1107 if (TREE_CODE (t
) == SSA_NAME
)
1108 return get_or_alloc_expr_for_name (t
);
1109 else if (is_gimple_min_invariant (t
))
1110 return get_or_alloc_expr_for_constant (t
);
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As they all
         do not have VOPs they get handled by the nary ops struct.  */
1116 vn_nary_op_t result
;
1117 unsigned int result_id
;
1118 vn_nary_op_lookup (t
, &result
);
1121 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1123 PRE_EXPR_NARY (e
) = result
;
1124 result_id
= lookup_expression_id (e
);
1127 pool_free (pre_expr_pool
, e
);
1128 e
= expression_for_id (result_id
);
1131 alloc_expression_id (e
);
1138 /* Return the folded version of T if T, when folded, is a gimple
1139 min_invariant. Otherwise, return T. */
1142 fully_constant_expression (pre_expr e
)
1150 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1151 switch (TREE_CODE_CLASS (nary
->opcode
))
1153 case tcc_expression
:
1154 if (nary
->opcode
== TRUTH_NOT_EXPR
)
1156 if (nary
->opcode
!= TRUTH_AND_EXPR
1157 && nary
->opcode
!= TRUTH_OR_EXPR
1158 && nary
->opcode
!= TRUTH_XOR_EXPR
)
1162 case tcc_comparison
:
1164 /* We have to go from trees to pre exprs to value ids to
1166 tree naryop0
= nary
->op
[0];
1167 tree naryop1
= nary
->op
[1];
1169 if (!is_gimple_min_invariant (naryop0
))
1171 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1172 unsigned int vrep0
= get_expr_value_id (rep0
);
1173 tree const0
= get_constant_for_value_id (vrep0
);
1175 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1177 if (!is_gimple_min_invariant (naryop1
))
1179 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1180 unsigned int vrep1
= get_expr_value_id (rep1
);
1181 tree const1
= get_constant_for_value_id (vrep1
);
1183 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1185 result
= fold_binary (nary
->opcode
, nary
->type
,
1187 if (result
&& is_gimple_min_invariant (result
))
1188 return get_or_alloc_expr_for_constant (result
);
1189 /* We might have simplified the expression to a
1190 SSA_NAME for example from x_1 * 1. But we cannot
1191 insert a PHI for x_1 unconditionally as x_1 might
1192 not be available readily. */
1196 if (nary
->opcode
!= REALPART_EXPR
1197 && nary
->opcode
!= IMAGPART_EXPR
1198 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1204 /* We have to go from trees to pre exprs to value ids to
1206 tree naryop0
= nary
->op
[0];
1207 tree const0
, result
;
1208 if (is_gimple_min_invariant (naryop0
))
1212 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1213 unsigned int vrep0
= get_expr_value_id (rep0
);
1214 const0
= get_constant_for_value_id (vrep0
);
1219 tree type1
= TREE_TYPE (nary
->op
[0]);
1220 const0
= fold_convert (type1
, const0
);
1221 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1223 if (result
&& is_gimple_min_invariant (result
))
1224 return get_or_alloc_expr_for_constant (result
);
1233 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1235 if ((folded
= fully_constant_vn_reference_p (ref
)))
1236 return get_or_alloc_expr_for_constant (folded
);
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */
1250 translate_vuse_through_block (VEC (vn_reference_op_s
, heap
) *operands
,
1251 alias_set_type set
, tree type
, tree vuse
,
1252 basic_block phiblock
,
1253 basic_block block
, bool *same_valid
)
1255 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1262 if (gimple_bb (phi
) != phiblock
)
1265 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1267 /* Use the alias-oracle to find either the PHI node in this block,
1268 the first VUSE used in this block that is equivalent to vuse or
1269 the first VUSE which definition in this block kills the value. */
1270 if (gimple_code (phi
) == GIMPLE_PHI
)
1271 e
= find_edge (block
, phiblock
);
1272 else if (use_oracle
)
1273 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1275 vuse
= gimple_vuse (phi
);
1276 phi
= SSA_NAME_DEF_STMT (vuse
);
1277 if (gimple_bb (phi
) != phiblock
)
1279 if (gimple_code (phi
) == GIMPLE_PHI
)
1281 e
= find_edge (block
, phiblock
);
1292 bitmap visited
= NULL
;
1293 /* Try to find a vuse that dominates this phi node by skipping
1294 non-clobbering statements. */
1295 vuse
= get_continuation_for_phi (phi
, &ref
, &visited
);
1297 BITMAP_FREE (visited
);
1303 /* If we didn't find any, the value ID can't stay the same,
1304 but return the translated vuse. */
1305 *same_valid
= false;
1306 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
  /* ??? We would like to return vuse here as this is the canonical
     upmost vdef that this reference is associated with.  But during
     insertion of the references into the hash tables we only ever
     directly insert with their direct gimple_vuse, hence returning
     something else would make us not find the other expression.  */
1313 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1319 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1320 SET2. This is used to avoid making a set consisting of the union
1321 of PA_IN and ANTIC_IN during insert. */
1323 static inline pre_expr
1324 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1328 result
= bitmap_find_leader (set1
, val
, NULL
);
1329 if (!result
&& set2
)
1330 result
= bitmap_find_leader (set2
, val
, NULL
);
1334 /* Get the tree type for our PRE expression e. */
1337 get_expr_type (const pre_expr e
)
1342 return TREE_TYPE (PRE_EXPR_NAME (e
));
1344 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1346 return PRE_EXPR_REFERENCE (e
)->type
;
1348 return PRE_EXPR_NARY (e
)->type
;
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */
1362 get_representative_for (const pre_expr e
)
1366 unsigned int value_id
= get_expr_value_id (e
);
1371 return PRE_EXPR_NAME (e
);
1373 return PRE_EXPR_CONSTANT (e
);
1377 /* Go through all of the expressions representing this value
1378 and pick out an SSA_NAME. */
1381 bitmap_set_t exprs
= VEC_index (bitmap_set_t
, value_expressions
,
1383 FOR_EACH_EXPR_ID_IN_SET (exprs
, i
, bi
)
1385 pre_expr rep
= expression_for_id (i
);
1386 if (rep
->kind
== NAME
)
1387 return PRE_EXPR_NAME (rep
);
1392 /* If we reached here we couldn't find an SSA_NAME. This can
1393 happen when we've discovered a value that has never appeared in
1394 the program as set to an SSA_NAME, most likely as the result of
1399 "Could not find SSA_NAME representative for expression:");
1400 print_pre_expr (dump_file
, e
);
1401 fprintf (dump_file
, "\n");
1404 exprtype
= get_expr_type (e
);
1406 /* Build and insert the assignment of the end result to the temporary
1407 that we will return. */
1408 if (!pretemp
|| exprtype
!= TREE_TYPE (pretemp
))
1410 pretemp
= create_tmp_reg (exprtype
, "pretmp");
1411 get_var_ann (pretemp
);
1414 name
= make_ssa_name (pretemp
, gimple_build_nop ());
1415 VN_INFO_GET (name
)->value_id
= value_id
;
1416 if (e
->kind
== CONSTANT
)
1417 VN_INFO (name
)->valnum
= PRE_EXPR_CONSTANT (e
);
1419 VN_INFO (name
)->valnum
= name
;
1421 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1424 fprintf (dump_file
, "Created SSA_NAME representative ");
1425 print_generic_expr (dump_file
, name
, 0);
1426 fprintf (dump_file
, " for expression:");
1427 print_pre_expr (dump_file
, e
);
1428 fprintf (dump_file
, "\n");
1437 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1438 basic_block pred
, basic_block phiblock
);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
1445 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1446 basic_block pred
, basic_block phiblock
)
1453 bool changed
= false;
1454 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1455 struct vn_nary_op_s newnary
;
1456 /* The NARY structure is only guaranteed to have been
1457 allocated to the nary->length operands. */
1458 memcpy (&newnary
, nary
, (sizeof (struct vn_nary_op_s
)
1459 - sizeof (tree
) * (4 - nary
->length
)));
1461 for (i
= 0; i
< newnary
.length
; i
++)
1463 if (TREE_CODE (newnary
.op
[i
]) != SSA_NAME
)
1467 pre_expr leader
, result
;
1468 unsigned int op_val_id
= VN_INFO (newnary
.op
[i
])->value_id
;
1469 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1470 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1471 if (result
&& result
!= leader
)
1473 tree name
= get_representative_for (result
);
1476 newnary
.op
[i
] = name
;
1481 changed
|= newnary
.op
[i
] != nary
->op
[i
];
1487 unsigned int new_val_id
;
1489 tree result
= vn_nary_op_lookup_pieces (newnary
.length
,
1497 if (result
&& is_gimple_min_invariant (result
))
1498 return get_or_alloc_expr_for_constant (result
);
1500 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1505 PRE_EXPR_NARY (expr
) = nary
;
1506 constant
= fully_constant_expression (expr
);
1507 if (constant
!= expr
)
1510 new_val_id
= nary
->value_id
;
1511 get_or_alloc_expression_id (expr
);
1515 new_val_id
= get_next_value_id ();
1516 VEC_safe_grow_cleared (bitmap_set_t
, heap
,
1518 get_max_value_id() + 1);
1519 nary
= vn_nary_op_insert_pieces (newnary
.length
,
1526 result
, new_val_id
);
1527 PRE_EXPR_NARY (expr
) = nary
;
1528 constant
= fully_constant_expression (expr
);
1529 if (constant
!= expr
)
1531 get_or_alloc_expression_id (expr
);
1533 add_to_value (new_val_id
, expr
);
1541 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1542 VEC (vn_reference_op_s
, heap
) *operands
= ref
->operands
;
1543 tree vuse
= ref
->vuse
;
1544 tree newvuse
= vuse
;
1545 VEC (vn_reference_op_s
, heap
) *newoperands
= NULL
;
1546 bool changed
= false, same_valid
= true;
1548 vn_reference_op_t operand
;
1549 vn_reference_t newref
;
1552 VEC_iterate (vn_reference_op_s
, operands
, i
, operand
); i
++, j
++)
1556 tree oldop0
= operand
->op0
;
1557 tree oldop1
= operand
->op1
;
1558 tree oldop2
= operand
->op2
;
1562 tree type
= operand
->type
;
1563 vn_reference_op_s newop
= *operand
;
1565 if (op0
&& TREE_CODE (op0
) == SSA_NAME
)
1567 unsigned int op_val_id
= VN_INFO (op0
)->value_id
;
1568 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1569 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1570 if (opresult
&& opresult
!= leader
)
1572 tree name
= get_representative_for (opresult
);
1580 changed
|= op0
!= oldop0
;
1582 if (op1
&& TREE_CODE (op1
) == SSA_NAME
)
1584 unsigned int op_val_id
= VN_INFO (op1
)->value_id
;
1585 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1586 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1587 if (opresult
&& opresult
!= leader
)
1589 tree name
= get_representative_for (opresult
);
1597 /* We can't possibly insert these. */
1598 else if (op1
&& !is_gimple_min_invariant (op1
))
1600 changed
|= op1
!= oldop1
;
1601 if (op2
&& TREE_CODE (op2
) == SSA_NAME
)
1603 unsigned int op_val_id
= VN_INFO (op2
)->value_id
;
1604 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1605 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1606 if (opresult
&& opresult
!= leader
)
1608 tree name
= get_representative_for (opresult
);
1616 /* We can't possibly insert these. */
1617 else if (op2
&& !is_gimple_min_invariant (op2
))
1619 changed
|= op2
!= oldop2
;
1622 newoperands
= VEC_copy (vn_reference_op_s
, heap
, operands
);
1623 /* We may have changed from an SSA_NAME to a constant */
1624 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op0
) != SSA_NAME
)
1625 newop
.opcode
= TREE_CODE (op0
);
1630 VEC_replace (vn_reference_op_s
, newoperands
, j
, &newop
);
1631 /* If it transforms from an SSA_NAME to an address, fold with
1632 a preceding indirect reference. */
1633 if (j
> 0 && op0
&& TREE_CODE (op0
) == ADDR_EXPR
1634 && VEC_index (vn_reference_op_s
,
1635 newoperands
, j
- 1)->opcode
== INDIRECT_REF
)
1636 vn_reference_fold_indirect (&newoperands
, &j
);
1638 if (i
!= VEC_length (vn_reference_op_s
, operands
))
1641 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1647 newvuse
= translate_vuse_through_block (newoperands
,
1648 ref
->set
, ref
->type
,
1649 vuse
, phiblock
, pred
,
1651 if (newvuse
== NULL_TREE
)
1653 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1658 if (changed
|| newvuse
!= vuse
)
1660 unsigned int new_val_id
;
1663 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1668 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1670 if (result
&& is_gimple_min_invariant (result
))
1672 gcc_assert (!newoperands
);
1673 return get_or_alloc_expr_for_constant (result
);
1676 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1677 expr
->kind
= REFERENCE
;
1682 PRE_EXPR_REFERENCE (expr
) = newref
;
1683 constant
= fully_constant_expression (expr
);
1684 if (constant
!= expr
)
1687 new_val_id
= newref
->value_id
;
1688 get_or_alloc_expression_id (expr
);
1692 if (changed
|| !same_valid
)
1694 new_val_id
= get_next_value_id ();
1695 VEC_safe_grow_cleared (bitmap_set_t
, heap
,
1697 get_max_value_id() + 1);
1700 new_val_id
= ref
->value_id
;
1701 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1704 result
, new_val_id
);
1706 PRE_EXPR_REFERENCE (expr
) = newref
;
1707 constant
= fully_constant_expression (expr
);
1708 if (constant
!= expr
)
1710 get_or_alloc_expression_id (expr
);
1712 add_to_value (new_val_id
, expr
);
1714 VEC_free (vn_reference_op_s
, heap
, newoperands
);
1724 tree name
= PRE_EXPR_NAME (expr
);
1726 def_stmt
= SSA_NAME_DEF_STMT (name
);
1727 if (gimple_code (def_stmt
) == GIMPLE_PHI
1728 && gimple_bb (def_stmt
) == phiblock
)
1733 e
= find_edge (pred
, gimple_bb (phi
));
1736 tree def
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1739 if (TREE_CODE (def
) == SSA_NAME
)
1740 def
= VN_INFO (def
)->valnum
;
1742 /* Handle constant. */
1743 if (is_gimple_min_invariant (def
))
1744 return get_or_alloc_expr_for_constant (def
);
1746 if (TREE_CODE (def
) == SSA_NAME
&& ssa_undefined_value_p (def
))
1749 newexpr
= get_or_alloc_expr_for_name (def
);
1760 /* Wrapper around phi_translate_1 providing caching functionality. */
1763 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1764 basic_block pred
, basic_block phiblock
)
1771 /* Constants contain no values that need translation. */
1772 if (expr
->kind
== CONSTANT
)
1775 if (value_id_constant_p (get_expr_value_id (expr
)))
1778 if (expr
->kind
!= NAME
)
1780 phitrans
= phi_trans_lookup (expr
, pred
);
1786 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1788 /* Don't add empty translations to the cache. Neither add
1789 translations of NAMEs as those are cheap to translate. */
1791 && expr
->kind
!= NAME
)
1792 phi_trans_add (expr
, phitrans
, pred
);
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */
1803 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1804 basic_block phiblock
)
1806 VEC (pre_expr
, heap
) *exprs
;
1810 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1812 bitmap_set_copy (dest
, set
);
1816 exprs
= sorted_array_from_bitmap_set (set
);
1817 for (i
= 0; VEC_iterate (pre_expr
, exprs
, i
, expr
); i
++)
1819 pre_expr translated
;
1820 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
1828 if (translated
->kind
== NAME
)
1829 bitmap_value_replace_in_set (dest
, translated
);
1831 bitmap_value_insert_into_set (dest
, translated
);
1833 VEC_free (pre_expr
, heap
, exprs
);
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */
1842 bitmap_find_leader (bitmap_set_t set
, unsigned int val
, gimple stmt
)
1844 if (value_id_constant_p (val
))
1848 bitmap_set_t exprset
= VEC_index (bitmap_set_t
, value_expressions
, val
);
1850 FOR_EACH_EXPR_ID_IN_SET (exprset
, i
, bi
)
1852 pre_expr expr
= expression_for_id (i
);
1853 if (expr
->kind
== CONSTANT
)
1857 if (bitmap_set_contains_value (set
, val
))
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
1872 bitmap_set_t exprset
= VEC_index (bitmap_set_t
, value_expressions
, val
);
1874 EXECUTE_IF_AND_IN_BITMAP (exprset
->expressions
,
1875 set
->expressions
, 0, i
, bi
)
1877 pre_expr val
= expression_for_id (i
);
1878 /* At the point where stmt is not null, there should always
1879 be an SSA_NAME first in the list of expressions. */
1882 gimple def_stmt
= SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val
));
1883 if (gimple_code (def_stmt
) != GIMPLE_PHI
1884 && gimple_bb (def_stmt
) == gimple_bb (stmt
)
1885 && gimple_uid (def_stmt
) >= gimple_uid (stmt
))
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */
1902 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1904 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1905 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1907 gimple_stmt_iterator gsi
;
1908 unsigned id
= get_expression_id (expr
);
1915 /* Lookup a previously calculated result. */
1916 if (EXPR_DIES (block
)
1917 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1918 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     in between that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
1925 ref
.base
= NULL_TREE
;
1926 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1928 tree def_vuse
, def_vdef
;
1929 def
= gsi_stmt (gsi
);
1930 def_vuse
= gimple_vuse (def
);
1931 def_vdef
= gimple_vdef (def
);
1933 /* Not a memory statement. */
1937 /* Not a may-def. */
1940 /* A load with the same VUSE, we're done. */
1941 if (def_vuse
== vuse
)
1947 /* Init ref only if we really need it. */
1948 if (ref
.base
== NULL_TREE
1949 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1955 /* If the statement may clobber expr, it dies. */
1956 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1963 /* Remember the result. */
1964 if (!EXPR_DIES (block
))
1965 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1966 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1968 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
#define union_contains_value(SET1, SET2, VAL)           \
  (bitmap_set_contains_value ((SET1), (VAL))            \
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
1978 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.
1981 vro_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
,
1982 vn_reference_op_t vro
)
1984 if (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
)
1986 struct pre_expr_d temp
;
1989 PRE_EXPR_NAME (&temp
) = vro
->op0
;
1990 temp
.id
= lookup_expression_id (&temp
);
1993 if (!union_contains_value (set1
, set2
,
1994 get_expr_value_id (&temp
)))
1997 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1999 struct pre_expr_d temp
;
2002 PRE_EXPR_NAME (&temp
) = vro
->op1
;
2003 temp
.id
= lookup_expression_id (&temp
);
2006 if (!union_contains_value (set1
, set2
,
2007 get_expr_value_id (&temp
)))
2011 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
2013 struct pre_expr_d temp
;
2016 PRE_EXPR_NAME (&temp
) = vro
->op2
;
2017 temp
.id
= lookup_expression_id (&temp
);
2020 if (!union_contains_value (set1
, set2
,
2021 get_expr_value_id (&temp
)))
/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */
2035 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
,
2041 return bitmap_set_contains_expr (AVAIL_OUT (block
), expr
);
2045 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2046 for (i
= 0; i
< nary
->length
; i
++)
2048 if (TREE_CODE (nary
->op
[i
]) == SSA_NAME
)
2050 struct pre_expr_d temp
;
2053 PRE_EXPR_NAME (&temp
) = nary
->op
[i
];
2054 temp
.id
= lookup_expression_id (&temp
);
2057 if (!union_contains_value (set1
, set2
,
2058 get_expr_value_id (&temp
)))
      /* If the NARY may trap make sure the block does not contain
         a possible exit point.
         ??? This is overly conservative if we translate AVAIL_OUT
         as the available expression might be after the exit point.  */
2066 if (BB_MAY_NOTRETURN (block
)
2067 && vn_nary_may_trap (nary
))
2074 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2075 vn_reference_op_t vro
;
2078 for (i
= 0; VEC_iterate (vn_reference_op_s
, ref
->operands
, i
, vro
); i
++)
2080 if (!vro_valid_in_sets (set1
, set2
, vro
))
2085 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2086 if (!gimple_nop_p (def_stmt
)
2087 && gimple_bb (def_stmt
) != block
2088 && !dominated_by_p (CDI_DOMINATORS
,
2089 block
, gimple_bb (def_stmt
)))
2092 return !value_dies_in_block_x (expr
, block
);
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */
2106 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
2108 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set1
);
2112 for (i
= 0; VEC_iterate (pre_expr
, exprs
, i
, expr
); i
++)
2114 if (!valid_in_sets (set1
, set2
, expr
, block
))
2115 bitmap_remove_from_set (set1
, expr
);
2117 VEC_free (pre_expr
, heap
, exprs
);
/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */
2125 clean (bitmap_set_t set
, basic_block block
)
2127 VEC (pre_expr
, heap
) *exprs
= sorted_array_from_bitmap_set (set
);
2131 for (i
= 0; VEC_iterate (pre_expr
, exprs
, i
, expr
); i
++)
2133 if (!valid_in_sets (set
, NULL
, expr
, block
))
2134 bitmap_remove_from_set (set
, expr
);
2136 VEC_free (pre_expr
, heap
, exprs
);
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;

/* Decide whether to defer a block for a later iteration, or PHI
   translate SOURCE to DEST using phis in PHIBLOCK.  Return false if we
   should defer the block, and true if we processed it.  */
2151 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
2152 basic_block block
, basic_block phiblock
)
2154 if (!BB_VISITED (phiblock
))
2156 SET_BIT (changed_blocks
, block
->index
);
2157 BB_VISITED (block
) = 0;
2158 BB_DEFERRED (block
) = 1;
2162 phi_translate_set (dest
, source
, block
, phiblock
);
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
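/* A small worked illustration (hypothetical CFG, for exposition only):
   if block B ends in a conditional with successors S1 and S2 and

     ANTIC_IN[S1] = { a + b }      ANTIC_IN[S2] = { a + b, c + d }

   then ANTIC_OUT[B] is their intersection { a + b }, and a + b is
   also anticipatable at the top of B provided neither a nor b is
   defined in B itself (otherwise clean () removes it).  */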
2177 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2179 bool changed
= false;
2180 bitmap_set_t S
, old
, ANTIC_OUT
;
2186 old
= ANTIC_OUT
= S
= NULL
;
2187 BB_VISITED (block
) = 1;
2189 /* If any edges from predecessors are abnormal, antic_in is empty,
2191 if (block_has_abnormal_pred_edge
)
2192 goto maybe_dump_sets
;
2194 old
= ANTIC_IN (block
);
2195 ANTIC_OUT
= bitmap_set_new ();
2197 /* If the block has no successors, ANTIC_OUT is empty. */
2198 if (EDGE_COUNT (block
->succs
) == 0)
2200 /* If we have one successor, we could have some phi nodes to
2201 translate through. */
2202 else if (single_succ_p (block
))
2204 basic_block succ_bb
= single_succ (block
);
      /* We trade iterations of the dataflow equations for having to
         phi translate the maximal set, which is incredibly slow
         (since the maximal set often has 300+ members, even when you
         have a small number of blocks).
         Basically, we defer the computation of ANTIC for this block
         until we have processed its successor, which will inevitably
         have a *much* smaller set of values to phi translate once
         clean has been run on it.
         The cost of doing this is that we technically perform more
         iterations, however, they are lower cost iterations.

         Timings for PRE on tramp3d-v4:
         without maximal set fix: 11 seconds
         with maximal set fix/without deferring: 26 seconds
         with maximal set fix/with deferring: 11 seconds.  */
2223 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
2227 goto maybe_dump_sets
;
2230 /* If we have multiple successors, we take the intersection of all of
2231 them. Note that in the case of loop exit phi nodes, we may have
2232 phis to translate through. */
2235 VEC(basic_block
, heap
) * worklist
;
2237 basic_block bprime
, first
= NULL
;
2239 worklist
= VEC_alloc (basic_block
, heap
, EDGE_COUNT (block
->succs
));
2240 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2243 && BB_VISITED (e
->dest
))
2245 else if (BB_VISITED (e
->dest
))
2246 VEC_quick_push (basic_block
, worklist
, e
->dest
);
2249 /* Of multiple successors we have to have visited one already. */
2252 SET_BIT (changed_blocks
, block
->index
);
2253 BB_VISITED (block
) = 0;
2254 BB_DEFERRED (block
) = 1;
2256 VEC_free (basic_block
, heap
, worklist
);
2257 goto maybe_dump_sets
;
2260 if (!gimple_seq_empty_p (phi_nodes (first
)))
2261 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2263 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
2265 for (i
= 0; VEC_iterate (basic_block
, worklist
, i
, bprime
); i
++)
2267 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2269 bitmap_set_t tmp
= bitmap_set_new ();
2270 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2271 bitmap_set_and (ANTIC_OUT
, tmp
);
2272 bitmap_set_free (tmp
);
2275 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2277 VEC_free (basic_block
, heap
, worklist
);
2280 /* Generate ANTIC_OUT - TMP_GEN. */
2281 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2283 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2284 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2287 /* Then union in the ANTIC_OUT - TMP_GEN values,
2288 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2289 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2290 bitmap_value_insert_into_set (ANTIC_IN (block
),
2291 expression_for_id (bii
));
2293 clean (ANTIC_IN (block
), block
);
2295 /* !old->expressions can happen when we deferred a block. */
2296 if (!old
->expressions
|| !bitmap_set_equal (old
, ANTIC_IN (block
)))
2299 SET_BIT (changed_blocks
, block
->index
);
2300 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2301 SET_BIT (changed_blocks
, e
->src
->index
);
2304 RESET_BIT (changed_blocks
, block
->index
);
2307 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2309 if (!BB_DEFERRED (block
) || BB_VISITED (block
))
2312 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2314 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2318 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2323 "Block %d was deferred for a future iteration.\n",
2328 bitmap_set_free (old
);
2330 bitmap_set_free (S
);
2332 bitmap_set_free (ANTIC_OUT
);
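/* Illustrative sketch, not part of the pass: for a block B whose only
   successor S needs no phi translation and is not deferred, the code
   above boils down to

     ANTIC_IN[B] = clean ((ANTIC_IN[S] U EXP_GEN[B]) - TMP_GEN[B])

   e.g. with ANTIC_IN[S] = { a_1 + b_2 }, EXP_GEN[B] = { d_4 * 2 } and
   TMP_GEN[B] empty, block B ends up anticipating both expressions.
   The SSA names used here are made up for the example.  */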
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])
*/
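/* Illustrative example, not part of the pass: suppose block B has two
   successors S1 and S2 with PA_IN[S1] = PA_IN[S2] = { },
   ANTIC_IN[S1] = { x_1 + y_2, a_3 * b_4 } and ANTIC_IN[S2] = { x_1 + y_2 }.
   Then x_1 + y_2 is fully anticipatable at B (it is in ANTIC_OUT[B]),
   while a_3 * b_4 only reaches B along the S1 path, so the equations
   above leave PA_IN[B] = { a_3 * b_4 } (assuming TMP_GEN[B] does not
   kill it).  The SSA names are made up for the example.  */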
static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  VEC_quick_push (basic_block, worklist, e->dest);
	}
      if (VEC_length (basic_block, worklist) > 0)
	{
	  for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
  bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */

static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  sbitmap_zero (has_abnormal_preds);

  FOR_EACH_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      SET_BIT (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
  BB_VISITED (EXIT_BLOCK_PTR) = 1;
  PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  sbitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      num_iterations++;
      changed = false;
      for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
	{
	  if (TEST_BIT (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK (postorder[i]);
	      changed |= compute_antic_aux (block,
					    TEST_BIT (has_abnormal_preds,
						      block->index));
	    }
	}
#ifdef ENABLE_CHECKING
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_assert (num_iterations < 500);
#endif
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      sbitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
	    {
	      if (TEST_BIT (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK (postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  TEST_BIT (has_abnormal_preds,
							    block->index));
		}
	    }
#ifdef ENABLE_CHECKING
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_assert (num_iterations < 500);
#endif
	}
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
				  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Return true if we can value number the call in STMT.  This is true
   if we have a pure or constant call.  */

static bool
can_value_number_call (gimple stmt)
{
  if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
    return true;
  return false;
}
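/* Illustrative example, not part of the pass: a call such as

     int square (int) __attribute__ ((const));
     ...
     x_1 = square (i_2);
     ...
     y_3 = square (i_2);

   has no side effects, so both calls can receive the same value number
   and the second computation is a candidate for elimination; a call
   that may write memory cannot be value numbered this way.  The names
   above are made up for the example.  */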
/* Return true if OP is a tree which we can perform PRE on.
   This may not match the operations we can value number, but in
   a perfect world would.  */

static bool
can_PRE_operation (tree op)
{
  return UNARY_CLASS_P (op)
    || BINARY_CLASS_P (op)
    || COMPARISON_CLASS_P (op)
    || TREE_CODE (op) == INDIRECT_REF
    || TREE_CODE (op) == COMPONENT_REF
    || TREE_CODE (op) == VIEW_CONVERT_EXPR
    || TREE_CODE (op) == CALL_EXPR
    || TREE_CODE (op) == ARRAY_REF;
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static VEC(gimple,heap) *inserted_exprs;
static bitmap inserted_phi_names;

/* Pool allocated fake store expressions are placed onto this
   worklist, which, after performing dead code elimination, is walked
   to see which expressions need to be put into GC'able memory.  */
static VEC(gimple, heap) *need_creation;
/* The actual worker for create_component_ref_by_pieces.  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts,
				  gimple domstmt)
{
  vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
					*operand);
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
	tree folded, sc = NULL_TREE;
	unsigned int nargs = 0;
	tree fn, *args;

	if (TREE_CODE (currop->op0) == FUNCTION_DECL)
	  fn = currop->op0;
	else
	  {
	    pre_expr op0 = get_or_alloc_expr_for (currop->op0);
	    fn = find_or_generate_expression (block, op0, stmts, domstmt);
	    if (!fn)
	      return NULL_TREE;
	  }
	if (currop->op1)
	  {
	    pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
	    sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
	    if (!sc)
	      return NULL_TREE;
	  }
	args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
					  ref->operands) - 1);
	while (*operand < VEC_length (vn_reference_op_s, ref->operands))
	  {
	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
							    operand, stmts,
							    domstmt);
	    if (!args[nargs])
	      return NULL_TREE;
	    nargs++;
	  }
	folded = build_call_array (currop->type,
				   (TREE_CODE (fn) == FUNCTION_DECL
				    ? build_fold_addr_expr (fn) : fn),
				   nargs, args);
	free (args);
	if (sc)
	  CALL_EXPR_STATIC_CHAIN (folded) = sc;
	return folded;
      }
      break;
    case TARGET_MEM_REF:
      {
	vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
					      *operand);
	pre_expr op0expr;
	tree genop0 = NULL_TREE;
	tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts, domstmt);
	if (!baseop)
	  return NULL_TREE;
	if (currop->op0)
	  {
	    op0expr = get_or_alloc_expr_for (currop->op0);
	    genop0 = find_or_generate_expression (block, op0expr,
						  stmts, domstmt);
	    if (!genop0)
	      return NULL_TREE;
	  }
	if (DECL_P (baseop))
	  return build6 (TARGET_MEM_REF, currop->type,
			 baseop, NULL_TREE,
			 genop0, currop->op1, currop->op2,
			 unshare_expr (nextop->op1));
	else
	  return build6 (TARGET_MEM_REF, currop->type,
			 NULL_TREE, baseop,
			 genop0, currop->op1, currop->op2,
			 unshare_expr (nextop->op1));
      }
      break;
    case ADDR_EXPR:
      if (currop->op0)
	{
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree folded;
	tree genop0 = create_component_ref_by_pieces_1 (block, ref,
							operand,
							stmts, domstmt);
	if (!genop0)
	  return NULL_TREE;
	folded = fold_build1 (currop->opcode, currop->type,
			      genop0);
	return folded;
      }
      break;
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree folded;
	tree genop1 = create_component_ref_by_pieces_1 (block, ref,
							operand,
							stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	genop1 = fold_convert (build_pointer_type (currop->type),
			       genop1);

	if (currop->opcode == MISALIGNED_INDIRECT_REF)
	  folded = fold_build2 (currop->opcode, currop->type,
				genop1, currop->op1);
	else
	  folded = fold_build1 (currop->opcode, currop->type,
				genop1);
	return folded;
      }
      break;
    case BIT_FIELD_REF:
      {
	tree folded;
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts, domstmt);
	pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
	pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
	tree genop1;
	tree genop2;

	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
	if (!genop2)
	  return NULL_TREE;
	folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
			      genop2);
	return folded;
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op2 of the reference op.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	pre_expr op1expr;
	tree genop2 = currop->op1;
	pre_expr op2expr;
	tree genop3 = currop->op2;
	pre_expr op3expr;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts, domstmt);
	if (!genop0)
	  return NULL_TREE;
	op1expr = get_or_alloc_expr_for (genop1);
	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	if (genop2)
	  {
	    /* Drop zero minimum index.  */
	    if (tree_int_cst_equal (genop2, integer_zero_node))
	      genop2 = NULL_TREE;
	    else
	      {
		op2expr = get_or_alloc_expr_for (genop2);
		genop2 = find_or_generate_expression (block, op2expr, stmts,
						      domstmt);
		if (!genop2)
		  return NULL_TREE;
	      }
	  }
	if (genop3)
	  {
	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
	    /* We can't always put a size in units of the element alignment
	       here as the element alignment may be not visible.  See
	       PR43783.  Simply drop the element size for constant
	       sizes.  */
	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
	      genop3 = NULL_TREE;
	    else
	      {
		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
		op3expr = get_or_alloc_expr_for (genop3);
		genop3 = find_or_generate_expression (block, op3expr, stmts,
						      domstmt);
		if (!genop3)
		  return NULL_TREE;
	      }
	  }
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }
    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	pre_expr op2expr;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand,
						stmts, domstmt);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by
	   themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    op2expr = get_or_alloc_expr_for (genop2);
	    genop2 = find_or_generate_expression (block, op2expr, stmts,
						  domstmt);
	    if (!genop2)
	      return NULL_TREE;
	  }

	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
			    genop2);
      }
    case SSA_NAME:
      {
	pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
	genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
	return genop;
      }
    default:
      gcc_unreachable ();
    }
}
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
   trying to rename aggregates into ssa form directly, which is a no no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts, gimple domstmt)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
}
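/* Illustrative example, not part of the pass: for a reference such as
   s.x[i_1], the routines above rebuild the whole access expression
   ARRAY_REF <COMPONENT_REF <s, x>, i_1> in one piece; only the scalar
   leaves (here the index i_1) go through find_or_generate_expression,
   so no temporary of aggregate type is ever created.  The names are
   made up for the example.  */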
/* Find a leader for an expression, or generate one using
   create_expression_by_pieces if it's ANTIC but complex.

   BLOCK is the basic_block we are looking for leaders in.
   EXPR is the expression to find a leader or generate for.
   STMTS is the statement list to put the inserted expressions on.
   Returns the SSA_NAME of the LHS of the generated expression or the
   leader.
   DOMSTMT if non-NULL is a statement that should be dominated by
   all uses in the generated expression.  If DOMSTMT is non-NULL this
   routine can fail and return NULL_TREE.  Otherwise it will assert
   on failure.  */

static tree
find_or_generate_expression (basic_block block, pre_expr expr,
			     gimple_seq *stmts, gimple domstmt)
{
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
					get_expr_value_id (expr), domstmt);
  tree genop = NULL_TREE;
  if (leader)
    {
      if (leader->kind == NAME)
	genop = PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	genop = PRE_EXPR_CONSTANT (leader);
    }

  /* If it's still NULL, it must be a complex expression, so generate
     it recursively.  Not so for FRE though.  */
  if (genop == NULL
      && !in_fre)
    {
      bitmap_set_t exprset;
      unsigned int lookfor = get_expr_value_id (expr);
      bool handled = false;
      bitmap_iterator bi;
      unsigned int i;

      exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr temp = expression_for_id (i);
	  if (temp->kind != NAME)
	    {
	      handled = true;
	      genop = create_expression_by_pieces (block, temp, stmts,
						   domstmt,
						   get_expr_type (expr));
	      break;
	    }
	}
      if (!handled && domstmt)
	return NULL_TREE;

      gcc_assert (handled);
    }
  return genop;
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessary GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   If DOMSTMT is non-NULL then we make sure that all uses in the
   expressions dominate that statement.  In this case the function
   can return NULL_TREE to signal failure.  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, gimple domstmt, tree type)
{
  tree temp, name;
  gimple newstmt;
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  tree folded;

  switch (expr->kind)
    {
      /* We may hit the NAME/CONSTANT case if we have to convert types
	 that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
      }
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	switch (nary->length)
	  {
	  case 2:
	    {
	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
	      pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
	      tree genop1 = find_or_generate_expression (block, op1,
							 stmts, domstmt);
	      tree genop2 = find_or_generate_expression (block, op2,
							 stmts, domstmt);
	      if (!genop1 || !genop2)
		return NULL_TREE;
	      /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
		 may be a constant with the wrong type.  */
	      if (nary->opcode == POINTER_PLUS_EXPR)
		{
		  genop1 = fold_convert (nary->type, genop1);
		  genop2 = fold_convert (sizetype, genop2);
		}
	      else
		{
		  genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
		  genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
		}

	      folded = fold_build2 (nary->opcode, nary->type,
				    genop1, genop2);
	    }
	    break;
	  case 1:
	    {
	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
	      tree genop1 = find_or_generate_expression (block, op1,
							 stmts, domstmt);
	      if (!genop1)
		return NULL_TREE;
	      genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);

	      folded = fold_build1 (nary->opcode, nary->type,
				    genop1);
	    }
	    break;
	  default:
	    return NULL_TREE;
	  }
      }
      break;
    default:
      return NULL_TREE;
    }

  if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
    folded = fold_convert (exprtype, folded);

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
				 false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  VEC_safe_push (gimple, heap, inserted_exprs, stmt);
	  if (TREE_CODE (forcedname) == SSA_NAME)
	    {
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      if (!in_fre)
		bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }
	  mark_symbols_for_renaming (stmt);
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  temp = pretemp;
  add_referenced_var (temp);

  newstmt = gimple_build_assign (temp, folded);
  name = make_ssa_name (temp, newstmt);
  gimple_assign_set_lhs (newstmt, name);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  VEC_safe_push (gimple, heap, inserted_exprs, newstmt);

  /* All the symbols in NEWEXPR should be put into SSA form.  */
  mark_symbols_for_renaming (newstmt);

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  VN_INFO_GET (name)->valnum = name;
  value_id = get_expr_value_id (expr);
  VN_INFO (name)->value_id = value_id;
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (!in_fre)
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
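/* Illustrative example, not part of the pass: asked to materialize the
   value of a_1 + b_2 * c_3 in some predecessor, the code above first
   generates (or finds a leader for) the inner piece and then the outer
   one, e.g.

     pretmp.4_7 = b_2 * c_3;
     pretmp.4_8 = a_1 + pretmp.4_7;

   Each intermediate SSA name gets a value number and is entered into
   NEW_SETS/AVAIL_OUT so later insertions can reuse it.  The temporary
   names shown are made up for the example.  */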
/* Returns true if we want to inhibit the insertions of PHI nodes
   for the given EXPR for basic block BB (a member of a loop).
   We want to do this, when we fear that the induction variable we
   create might inhibit vectorization.  */

static bool
inhibit_phi_insertion (basic_block bb, pre_expr expr)
{
  vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
  VEC (vn_reference_op_s, heap) *ops = vr->operands;
  vn_reference_op_t op;
  unsigned i;

  /* If we aren't going to vectorize we don't inhibit anything.  */
  if (!flag_tree_vectorize)
    return false;

  /* Otherwise we inhibit the insertion when the address of the
     memory reference is a simple induction variable.  In other
     cases the vectorizer won't do anything anyway (either it's
     loop invariant or a complicated expression).  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TREE_CODE (op->op0) != SSA_NAME)
	    break;
	  /* Fallthru.  */
	case SSA_NAME:
	  {
	    basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
	    affine_iv iv;
	    /* Default defs are loop invariant.  */
	    if (!defbb)
	      break;
	    /* Defined outside this loop, also loop invariant.  */
	    if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
	      break;
	    /* If it's a simple induction variable inhibit insertion,
	       the vectorizer might be interested in this one.  */
	    if (simple_iv (bb->loop_father, bb->loop_father,
			   op->op0, &iv, true))
	      return true;
	    /* No simple IV, vectorizer can't do anything, hence no
	       reason to inhibit the transformation for this operand.  */
	    break;
	  }
	default:
	  break;
	}
    }
  return false;
}
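/* Illustrative example, not part of the pass: in a loop like

     for (i = 0; i < n; i++)
       sum += a[i];

   the index feeding the array reference is a simple induction variable,
   so simple_iv recognizes it and PHI insertion for the load is
   inhibited; carrying that address in yet another PHI could keep the
   loop vectorizer from recognizing the access pattern.  */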
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    pre_expr *avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gimple phi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Found partial redundancy for expression ");
      print_pre_expr (dump_file, expr);
      fprintf (dump_file, " (%04d)\n", val);
    }

  /* Make sure we aren't creating an induction variable.  */
  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && (expr->kind != REFERENCE
	      || inhibit_phi_insertion (block, expr)))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[bprime->index];

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
	{
	  builtexpr = create_expression_by_pieces (bprime,
						   eprime,
						   &stmts, NULL,
						   type);
	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
	  gsi_insert_seq_on_edge (pred, stmts);
	  avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
	  insertions = true;
	}
      else if (eprime->kind == CONSTANT)
	{
	  /* Constants may not have the right type, fold_convert
	     should give us back a constant with the right type.  */
	  tree constant = PRE_EXPR_CONSTANT (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
	    {
	      tree builtexpr = fold_convert (type, constant);
	      if (!is_gimple_min_invariant (builtexpr))
		{
		  tree forcedexpr = force_gimple_operand (builtexpr,
							  &stmts, true,
							  NULL);
		  if (!is_gimple_min_invariant (forcedexpr))
		    {
		      if (forcedexpr != builtexpr)
			{
			  VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
			  VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
			}
		      if (stmts)
			{
			  gimple_stmt_iterator gsi;
			  gsi = gsi_start (stmts);
			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
			    {
			      gimple stmt = gsi_stmt (gsi);
			      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
			      gimple_set_plf (stmt, NECESSARY, false);
			    }
			  gsi_insert_seq_on_edge (pred, stmts);
			}
		      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
		    }
		}
	    }
	}
      else if (eprime->kind == NAME)
	{
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
	  tree name = PRE_EXPR_NAME (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
	    {
	      tree builtexpr;
	      tree forcedexpr;
	      builtexpr = fold_convert (type, name);
	      forcedexpr = force_gimple_operand (builtexpr,
						 &stmts, true,
						 NULL);

	      if (forcedexpr != name)
		{
		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
		}

	      if (stmts)
		{
		  gimple_stmt_iterator gsi;
		  gsi = gsi_start (stmts);
		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
		    {
		      gimple stmt = gsi_stmt (gsi);
		      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
		      gimple_set_plf (stmt, NECESSARY, false);
		    }
		  gsi_insert_seq_on_edge (pred, stmts);
		}
	      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
	    }
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  if (!prephitemp || TREE_TYPE (prephitemp) != type)
    {
      prephitemp = create_tmp_var (type, "prephitmp");
      get_var_ann (prephitemp);
    }

  temp = prephitemp;
  add_referenced_var (temp);

  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (temp) = 1;
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
  VN_INFO (gimple_phi_result (phi))->value_id = val;
  VEC_safe_push (gimple, heap, inserted_exprs, phi);
  bitmap_set_bit (inserted_phi_names,
		  SSA_NAME_VERSION (gimple_phi_result (phi)));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->src->index];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
		     UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     added.

     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d\n", block->index);
    }
  pre_stats.phis++;
  return true;
}
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
   2a. Iterate over the ANTIC expressions for the block to see if
   any of them are partially redundant.
   2b. If so, insert them into the necessary predecessors to make
   the expression fully redundant.
   2c. Insert a new PHI merging the values of the predecessors.
   2d. Insert the new PHI, and the new expressions, into the
   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_insertion.  */
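/* Illustrative example, not part of the pass: consider

     BB2: if (c_1) goto BB3; else goto BB4;
     BB3: x_2 = a_3 + b_4;
     BB4: (no computation of the value)
     BB5: y_5 = a_3 + b_4;     <- both BB3 and BB4 fall into BB5

   a_3 + b_4 is ANTIC in BB5 but available only from BB3, so step 2b
   inserts pretmp_6 = a_3 + b_4 on the BB4 edge, step 2c creates
   prephitmp_7 = PHI <x_2(BB3), pretmp_6(BB4)> in BB5, and the later
   elimination phase replaces y_5's computation with prephitmp_7.  The
   SSA names used here are made up for the example.  */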
static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (expr->kind != NAME)
	{
	  pre_expr *avail;
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Found fully redundant value\n");
	      continue;
	    }

	  avail = XCNEWVEC (pre_expr, last_basic_block);
	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime, NULL);
	      if (edoubleprime == NULL)
		{
		  avail[bprime->index] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[bprime->index] = edoubleprime;
		  by_some = true;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_eq (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some && do_insertion
	      && dbg_cnt (treepre_insert))
	    {
	      if (insert_into_preds_of_block (block, get_expression_id (expr),
					      avail))
		new_stuff = true;
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same && eprime
		   && (edoubleprime->kind == CONSTANT
		       || edoubleprime->kind == NAME)
		   && !value_id_constant_p (val))
	    {
	      unsigned int j;
	      bitmap_iterator bi;
	      bitmap_set_t exprset = VEC_index (bitmap_set_t,
						value_expressions, val);

	      unsigned int new_val = get_expr_value_id (edoubleprime);
	      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
		{
		  pre_expr expr = expression_for_id (j);

		  if (expr->kind == NAME)
		    {
		      vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
		      /* Just reset the value id and valnum so it is
			 the same as the constant we have discovered.  */
		      if (edoubleprime->kind == CONSTANT)
			{
			  info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
			  pre_stats.constified++;
			}
		      else
			info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
		      info->value_id = new_val;
		    }
		}
	    }
	  free (avail);
	}
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */

static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      if (expr->kind != NAME)
	{
	  pre_expr *avail;
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  avail = XCNEWVEC (pre_expr, last_basic_block);
	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime, NULL);
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	      else
		avail[bprime->index] = edoubleprime;
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all && dbg_cnt (treepre_insert))
	    {
	      pre_stats.pa_insert++;
	      if (insert_into_preds_of_block (block, get_expression_id (expr),
					      avail))
		new_stuff = true;
	    }
	  free (avail);
	}
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT. For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }
	  if (!single_pred_p (block))
	    {
	      new_stuff |= do_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_partial_partial_insertion (block, dom);
	    }
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son);
    }

  return new_stuff;
}
/* Perform insertion of partially redundant values.  */

static void
insert (void)
{
  bool new_stuff = true;
  basic_block bb;
  int num_iterations = 0;

  FOR_ALL_BB (bb)
    NEW_SETS (bb) = bitmap_set_new ();

  while (new_stuff)
    {
      num_iterations++;
      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
    }
  statistics_histogram_event (cfun, "insert iterations", num_iterations);
}
/* Add OP to EXP_GEN (block), and possibly to the maximal set.  */

static void
add_to_exp_gen (basic_block block, tree op)
{
  if (!in_fre)
    {
      pre_expr result;
      if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
	return;
      result = get_or_alloc_expr_for_name (op);
      bitmap_value_insert_into_set (EXP_GEN (block), result);
    }
}
/* Create value ids for PHI in BLOCK.  */

static void
make_values_for_phi (gimple phi, basic_block block)
{
  tree result = gimple_phi_result (phi);

  /* We have no need for virtual phis, as they don't represent
     actual computations.  */
  if (is_gimple_reg (result))
    {
      pre_expr e = get_or_alloc_expr_for_name (result);
      add_to_value (get_expr_value_id (e), e);
      bitmap_insert_into_set (PHI_GEN (block), e);
      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
      if (!in_fre)
	{
	  unsigned i;
	  for (i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  e = get_or_alloc_expr_for_name (arg);
		  add_to_value (get_expr_value_id (e), e);
		}
	    }
	}
    }
}
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
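/* Illustrative example, not part of the pass: if BB7 is immediately
   dominated by BB6 with AVAIL_OUT[BB6] = { a_1, t_2 }, and BB7 defines
   x_3 by a phi and t_4 by an assignment, then

     AVAIL_IN[BB7]  = AVAIL_OUT[BB6]              = { a_1, t_2 }
     AVAIL_OUT[BB7] = { a_1, t_2 } U { x_3 } U { t_4 }

   The block numbers and SSA names are made up for the example.  */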
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
	  || !SSA_NAME_IS_DEFAULT_DEF (name)
	  || has_zero_uses (name)
	  || !is_gimple_reg (name))
	continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      if (!in_fre)
	bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple_stmt_iterator gsi;
      gimple stmt;
      basic_block dom;
      unsigned int stmt_uid = 1;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      /* Generate values for PHI nodes.  */
      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
	make_values_for_phi (gsi_stmt (gsi), block);

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  tree op;

	  stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, stmt_uid++);

	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
	  if (is_gimple_call (stmt)
	      && !stmt_ends_bb_p (stmt))
	    {
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbids hoisting possibly trapping expressions
		 before it.  */
	      int flags = gimple_call_flags (stmt);
	      if (!(flags & ECF_CONST)
		  || (flags & ECF_LOOPING_CONST_OR_PURE))
		BB_MAY_NOTRETURN (block) = 1;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (op);

	      add_to_value (get_expr_value_id (e), e);
	      if (!in_fre)
		bitmap_insert_into_set (TMP_GEN (block), e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
	    }

	  if (gimple_has_volatile_ops (stmt)
	      || stmt_could_throw_p (stmt))
	    continue;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		add_to_exp_gen (block, op);
	      continue;

	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		unsigned int i;
		vn_reference_op_t vro;
		pre_expr result = NULL;
		VEC(vn_reference_op_s, heap) *ops = NULL;

		if (!can_value_number_call (stmt))
		  continue;

		copy_reference_ops_from_call (stmt, &ops);
		vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
					    gimple_expr_type (stmt),
					    ops, &ref, false);
		VEC_free (vn_reference_op_s, heap, ops);
		if (!ref)
		  continue;

		for (i = 0; VEC_iterate (vn_reference_op_s,
					 ref->operands, i, vro); i++)
		  {
		    if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
		      add_to_exp_gen (block, vro->op0);
		    if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
		      add_to_exp_gen (block, vro->op1);
		    if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
		      add_to_exp_gen (block, vro->op2);
		  }
		result = (pre_expr) pool_alloc (pre_expr_pool);
		result->kind = REFERENCE;
		result->id = 0;
		PRE_EXPR_REFERENCE (result) = ref;

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		if (!in_fre)
		  bitmap_value_insert_into_set (EXP_GEN (block), result);
		continue;
	      }

	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
		  {
		  case tcc_unary:
		  case tcc_binary:
		  case tcc_comparison:
		    {
		      vn_nary_op_t nary;
		      unsigned int i;

		      vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
						gimple_assign_rhs_code (stmt),
						gimple_expr_type (stmt),
						gimple_assign_rhs1 (stmt),
						gimple_assign_rhs2 (stmt),
						NULL_TREE, NULL_TREE, &nary);

		      if (!nary)
			continue;

		      for (i = 0; i < nary->length; i++)
			if (TREE_CODE (nary->op[i]) == SSA_NAME)
			  add_to_exp_gen (block, nary->op[i]);

		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = NARY;
		      result->id = 0;
		      PRE_EXPR_NARY (result) = nary;
		      break;
		    }

		  case tcc_declaration:
		  case tcc_reference:
		    {
		      vn_reference_t ref;
		      unsigned int i;
		      vn_reference_op_t vro;

		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
					   gimple_vuse (stmt),
					   true, &ref);
		      if (!ref)
			continue;

		      for (i = 0; VEC_iterate (vn_reference_op_s,
					       ref->operands, i, vro); i++)
			{
			  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
			    add_to_exp_gen (block, vro->op0);
			  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
			    add_to_exp_gen (block, vro->op1);
			  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
			    add_to_exp_gen (block, vro->op2);
			}
		      result = (pre_expr) pool_alloc (pre_expr_pool);
		      result->kind = REFERENCE;
		      result->id = 0;
		      PRE_EXPR_REFERENCE (result) = ref;
		      break;
		    }

		  default:
		    /* For any other statement that we don't
		       recognize, simply add all referenced
		       SSA_NAMEs to EXP_GEN.  */
		    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		      add_to_exp_gen (block, op);
		    continue;
		  }

		get_or_alloc_expression_id (result);
		add_to_value (get_expr_value_id (result), result);
		if (!in_fre)
		  bitmap_value_insert_into_set (EXP_GEN (block), result);

		continue;
	      }
	    default:
	      break;
	    }
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
/* Insert the expression for SSA_VN that SCCVN thought would be simpler
   than the available expressions for it.  The insertion point is
   right before the first use in STMT.  Returns the SSA_NAME that should
   be used for replacement.  */

static tree
do_SCCVN_insertion (gimple stmt, tree ssa_vn)
{
  basic_block bb = gimple_bb (stmt);
  gimple_stmt_iterator gsi;
  gimple_seq stmts = NULL;
  tree expr;
  pre_expr e;

  /* First create a value expression from the expression we want
     to insert and associate it with the value handle for SSA_VN.  */
  e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));

  /* Then use create_expression_by_pieces to generate a valid
     expression to insert at this point of the IL stream.  */
  expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
  if (expr == NULL_TREE)
    return NULL_TREE;
  gsi = gsi_for_stmt (stmt);
  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

  return expr;
}
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  VEC (gimple, heap) *to_remove = NULL;
  basic_block b;
  unsigned int todo = 0;
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  FOR_EACH_BB (b)
    {
      for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);

	  /* Lookup the RHS of the expression, see if we have an
	     available computation for it.  If so, replace the RHS with
	     the available computation.  */
	  if (gimple_has_lhs (stmt)
	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
	      && !gimple_assign_ssa_name_copy_p (stmt)
	      && (!gimple_assign_single_p (stmt)
		  || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	      && !gimple_has_volatile_ops  (stmt)
	      && !has_zero_uses (gimple_get_lhs (stmt)))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree rhs = NULL_TREE;
	      tree sprime = NULL;
	      pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
	      pre_expr sprimeexpr;

	      if (gimple_assign_single_p (stmt))
		rhs = gimple_assign_rhs1 (stmt);

	      sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
					       get_expr_value_id (lhsexpr),
					       NULL);

	      if (sprimeexpr)
		{
		  if (sprimeexpr->kind == CONSTANT)
		    sprime = PRE_EXPR_CONSTANT (sprimeexpr);
		  else if (sprimeexpr->kind == NAME)
		    sprime = PRE_EXPR_NAME (sprimeexpr);
		  else
		    gcc_unreachable ();
		}

	      /* If there is no existing leader but SCCVN knows this
		 value is constant, use that constant.  */
	      if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
		{
		  sprime = VN_INFO (lhs)->valnum;
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (sprime)))
		    sprime = fold_convert (TREE_TYPE (lhs), sprime);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime, 0);
		      fprintf (dump_file, " in ");
		      print_gimple_stmt (dump_file, stmt, 0, 0);
		    }
		  pre_stats.eliminations++;
		  propagate_tree_value_into_stmt (&gsi, sprime);
		  stmt = gsi_stmt (gsi);
		  update_stmt (stmt);
		  continue;
		}

	      /* If there is no existing usable leader but SCCVN thinks
		 it has an expression it wants to use as replacement,
		 insert that.  */
	      if (!sprime || sprime == lhs)
		{
		  tree val = VN_INFO (lhs)->valnum;
		  if (val != VN_TOP
		      && TREE_CODE (val) == SSA_NAME
		      && VN_INFO (val)->needs_insertion
		      && can_PRE_operation (vn_get_expr_for (val)))
		    sprime = do_SCCVN_insertion (stmt, val);
		}
	      if (sprime
		  && sprime != lhs
		  && (rhs == NULL_TREE
		      || TREE_CODE (rhs) != SSA_NAME
		      || may_propagate_copy (rhs, sprime)))
		{
		  gcc_assert (sprime != rhs);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime, 0);
		      fprintf (dump_file, " in ");
		      print_gimple_stmt (dump_file, stmt, 0, 0);
		    }

		  if (TREE_CODE (sprime) == SSA_NAME)
		    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
				    NECESSARY, true);
		  /* We need to make sure the new and old types actually match,
		     which may require adding a simple cast, which fold_convert
		     will do for us.  */
		  if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
		      && !useless_type_conversion_p (gimple_expr_type (stmt),
						     TREE_TYPE (sprime)))
		    sprime = fold_convert (gimple_expr_type (stmt), sprime);

		  pre_stats.eliminations++;
		  propagate_tree_value_into_stmt (&gsi, sprime);
		  stmt = gsi_stmt (gsi);
		  update_stmt (stmt);

		  /* If we removed EH side effects from the statement, clean
		     its EH information.  */
		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		    {
		      bitmap_set_bit (need_eh_cleanup,
				      gimple_bb (stmt)->index);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "  Removed EH side effects.\n");
		    }
		}
	    }
	  /* If the statement is a scalar store, see if the expression
	     has the same value number as its rhs.  If so, the store is
	     dead.  */
	  else if (gimple_assign_single_p (stmt)
		   && !is_gimple_reg (gimple_assign_lhs (stmt))
		   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		       || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      tree val;
	      val = vn_reference_lookup (gimple_assign_lhs (stmt),
					 gimple_vuse (stmt), true, NULL);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = VN_INFO (rhs)->valnum;
	      if (val
		  && operand_equal_p (val, rhs, 0))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleted redundant store ");
		      print_gimple_stmt (dump_file, stmt, 0, 0);
		    }

		  /* Queue stmt for removal.  */
		  VEC_safe_push (gimple, heap, to_remove, stmt);
		}
	    }
	  /* Visit COND_EXPRs and fold the comparison with the
	     available value-numbers.  */
	  else if (gimple_code (stmt) == GIMPLE_COND)
	    {
	      tree op0 = gimple_cond_lhs (stmt);
	      tree op1 = gimple_cond_rhs (stmt);
	      tree result;

	      if (TREE_CODE (op0) == SSA_NAME)
		op0 = VN_INFO (op0)->valnum;
	      if (TREE_CODE (op1) == SSA_NAME)
		op1 = VN_INFO (op1)->valnum;
	      result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
				    op0, op1);
	      if (result && TREE_CODE (result) == INTEGER_CST)
		{
		  if (integer_zerop (result))
		    gimple_cond_make_false (stmt);
		  else
		    gimple_cond_make_true (stmt);
		  update_stmt (stmt);
		  todo = TODO_cleanup_cfg;
		}
	    }
	  /* Visit indirect calls and turn them into direct calls if
	     possible.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
	    {
	      tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replacing call target with ");
		      print_generic_expr (dump_file, fn, 0);
		      fprintf (dump_file, " in ");
		      print_gimple_stmt (dump_file, stmt, 0, 0);
		    }

		  gimple_call_set_fn (stmt, fn);
		  update_stmt (stmt);
		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		    {
		      bitmap_set_bit (need_eh_cleanup,
				      gimple_bb (stmt)->index);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "  Removed EH side effects.\n");
		    }

		  /* Changing an indirect call to a direct call may
		     have exposed different semantics.  This may
		     require an SSA update.  */
		  todo |= TODO_update_ssa_only_virtuals;
		}
	    }
	}

      for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
	{
	  gimple stmt, phi = gsi_stmt (gsi);
	  tree sprime = NULL_TREE, res = PHI_RESULT (phi);
	  pre_expr sprimeexpr, resexpr;
	  gimple_stmt_iterator gsi2;

	  /* We want to perform redundant PHI elimination.  Do so by
	     replacing the PHI with a single copy if possible.
	     Do not touch inserted, single-argument or virtual PHIs.  */
	  if (gimple_phi_num_args (phi) == 1
	      || !is_gimple_reg (res)
	      || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  resexpr = get_or_alloc_expr_for_name (res);
	  sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
					   get_expr_value_id (resexpr), NULL);
	  if (sprimeexpr)
	    {
	      if (sprimeexpr->kind == CONSTANT)
		sprime = PRE_EXPR_CONSTANT (sprimeexpr);
	      else if (sprimeexpr->kind == NAME)
		sprime = PRE_EXPR_NAME (sprimeexpr);
	      else
		gcc_unreachable ();
	    }
	  if (!sprime
	      || sprime == res)
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime, 0);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  stmt = gimple_build_assign (res, sprime);
	  SSA_NAME_DEF_STMT (res) = stmt;
	  if (TREE_CODE (sprime) == SSA_NAME)
	    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
			    NECESSARY, true);
	  gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  /* Queue the copy for eventual removal.  */
	  VEC_safe_push (gimple, heap, to_remove, stmt);
	  pre_stats.eliminations++;
	}
    }

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.  */
  for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
	 instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (rhs) == SSA_NAME
	  && single_imm_use (lhs, &use_p, &use_stmt)
	  && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
	{
	  SET_USE (use_p, gimple_assign_rhs1 (stmt));
	  update_stmt (use_stmt);
	}

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
	  || has_zero_uses (lhs))
	{
	  gsi = gsi_for_stmt (stmt);
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }
  VEC_free (gimple, heap, to_remove);

  return todo;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.   This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  VEC(gimple,heap) *worklist = NULL;
  int i;
  gimple t;

  worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (gimple_plf (t, NECESSARY))
	VEC_quick_push (gimple, worklist, t);
    }
  while (VEC_length (gimple, worklist) > 0)
    {
      t = VEC_pop (gimple, worklist);

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    VEC_quick_push (gimple, worklist, n);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		VEC_safe_push (gimple, heap, worklist, n);
	    }
	}
    }

  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  VEC_free (gimple, heap, worklist);
}
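/* Illustrative example, not part of the pass: if an earlier iteration
   inserted pretmp_6 = a_1 + b_2 on an edge, but the phi that would have
   consumed it was never created (see insert_into_preds_of_block), then
   nothing marks that statement NECESSARY, the worklist walk above never
   reaches it, and the final loop deletes it again.  The SSA names are
   made up for the example.  */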
/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
   true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
   the number of visited blocks.  */

static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp = 0;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the last edge on to the stack.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
        {
          /* Mark that we have visited the source.  */
          SET_BIT (visited, src->index);

          if (EDGE_COUNT (src->preds) > 0)
            /* Since the SRC node has been visited for the first
               time, check its predecessors.  */
            stack[sp++] = ei_start (src->preds);
          else
            post_order[post_order_num++] = src->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
            post_order[post_order_num++] = dest->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
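/* Illustrative example for the function above (hypothetical CFG): for a
   diamond

       ENTRY -> A -> { B, C } -> D -> EXIT

   the walk starts at EXIT_BLOCK_PTR->preds and follows predecessor edges
   only, so with the usual edge ordering the array comes back roughly as
   { A, B, C, D } (block indices), with EXIT prepended and ENTRY appended
   only when INCLUDE_ENTRY_EXIT is true.  The order is thus computed on the
   reverse CFG, which is the direction the backward ANTIC iteration cares
   about.  */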
/* Initialize data structures used by PRE.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                         get_max_value_id () + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  FOR_ALL_BB (bb)
    bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
                                     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
                                  pre_expr_hash,
                                  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
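/* Note on the accessors used above (illustrative; the macros themselves are
   defined earlier in this file): each basic block's ->aux now points at a
   freshly zeroed struct bb_bitmap_sets, so e.g.

     bitmap_set_t avail = AVAIL_OUT (bb);

   is simply a field access through bb->aux.  Everything allocated here is
   released again in fini_pre below.  */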
/* Deallocate data structures used by PRE.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  VEC_free (gimple, heap, inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);

  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  if (!do_fre)
    loop_optimizer_finalize ();
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre))
    {
      if (!do_fre)
        {
          remove_dead_inserted_code ();
          loop_optimizer_finalize ();
        }

      return 0;
    }

  init_pre (do_fre);
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
        {
          print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
          print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
          print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
          print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
        }
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  clear_expression_ids ();
  free_scc_vn ();
  if (!do_fre)
    remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre (do_fre);

  return todo;
}
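/* Overview of the flow above (the phase names refer to functions in this
   file):

     run_scc_vn                      value numbering (shared with FRE)
     init_pre / compute_avail        AVAIL_OUT, EXP_GEN, TMP_GEN, PHI_GEN
     compute_antic + insert          PRE only, skipped for very large CFGs
     eliminate                       replace redundancies with their leaders
     remove_dead_inserted_code       PRE only, clean up unused insertions
     fini_pre

   DO_FRE selects the FRE subset, which never inserts anything and therefore
   never needs the insertion-DCE step.  */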
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",                                /* name */
  gate_pre,                             /* gate */
  do_pre,                               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PRE,                          /* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,                         /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_rebuild_alias,                   /* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",                                /* name */
  gate_fre,                             /* gate */
  execute_fre,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_FRE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};
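/* The two descriptors differ mainly in their requirements: PRE insists on
   PROP_no_crit_edges (it inserts computations on incoming edges) and asks
   for alias information to be rebuilt up front, while FRE only needs a CFG
   in SSA form since it never inserts new computations.  */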