/* Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "symbol-summary.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.
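
   As a rough illustration (a sketch only; the actual computation is
   done statement by statement during the AVAIL walk of this pass),
   availability in SSA flows forward along the dominator tree, so the
   calculation essentially amounts to

     AVAIL_OUT[block] = AVAIL_OUT[idom (block)] U PHI_GEN[block]
                                                U TMP_GEN[block]

   evaluated in a single dominator-tree walk with no fixpoint
   iteration.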
   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.
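
   As a rough sketch (the precise equations are restated before
   compute_antic_aux further down in this file):

     ANTIC_OUT[b] = intersection of ANTIC_IN[succ] over succ(b),
                    phi-translated where a successor begins with PHIs
     ANTIC_IN[b]  = clean (ANTIC_OUT[b] U EXP_GEN[b] - TMP_GEN[b])

   and the whole system is iterated until no ANTIC_IN set changes.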
   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
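
/* A minimal worked example (hypothetical input, not taken from this
   file) of the situation the pass targets:

     int
     f (int a, int b, int flag)
     {
       int x = 0, y;
       if (flag)
         x = a + b;    <- a + b computed only on this path
       y = a + b;      <- partially redundant
       return x + y;
     }

   a + b is ANTIC at the statement computing y but AVAIL in only one
   predecessor, so insertion adds a computation of a + b on the edge
   where it was missing; elimination then replaces the second a + b
   with a use of the value merged by a PHI node.  */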
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
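
/* For instance (an illustrative sketch, not code from this file): if
   value numbering proves that b_2 = a_1 + 1 and c_3 = a_1 + 1 compute
   the same value, both names map to a single value_id, and either name
   can act as the leader for that value in the bitmap sets below; a
   fresh SSA_NAME is only manufactured when no name for the value
   exists at all.  */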
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
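
/* A minimal usage sketch (hypothetical values, using only helpers
   defined later in this file):

     bitmap_set_t s = bitmap_set_new ();
     bitmap_insert_into_set (s, e);     <- records e's id and value-id
     if (bitmap_set_contains_value (s, get_expr_value_id (e)))
       ...                              <- value-level membership test

   Both membership tests are plain bitmap lookups; only
   sorted_array_from_bitmap_set pays for ordering, and only when a
   topologically sorted view is actually requested.  */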
/* Type of expression, used to know which member of the PRE_EXPR union
   is in use.  */

  vn_reference_t reference;

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
  enum pre_expr_kind kind;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
  if (e1->kind != e2->kind)

      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));

pre_expr_d::hash (const pre_expr_d *e)
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
      return PRE_EXPR_NARY (e)->hashcode;
      return PRE_EXPR_REFERENCE (e)->hashcode;
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
      slot = expression_to_id->find_slot (expr, INSERT);
  return next_expression_id - 1;
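
/* Note on the fast path above (describing the existing code, not new
   behavior): NAME expressions get a second, denser mapping, name_to_id
   indexed by SSA_NAME_VERSION, so lookup_expression_id below can avoid
   the hash table for the very common case of looking up an SSA name.  */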
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)

static inline unsigned int
lookup_expression_id (const pre_expr expr)
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
      return name_to_id[version];
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      return ((pre_expr) *slot)->id;
/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
  unsigned int id = lookup_expression_id (expr);
    return alloc_expression_id (expr);
  return expr->id = id;

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
  return expressions[id];

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

clear_expression_ids (void)
  expressions.release ();

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes", 30);
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

get_or_alloc_expr_for_name (tree name)
  struct pre_expr_d expr;
  unsigned int result_id;

  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
  bitmap_head expressions;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
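
/* For example (a sketch; the real maintenance is done by add_to_value
   below): if the expressions with ids 4 and 9 both carry value-id 7,
   then value_expressions[7] is a bitmap with bits 4 and 9 set.
   Walking that bitmap is how bitmap_find_leader and
   sorted_array_from_bitmap_set map a value back to candidate
   expressions.  */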
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */

  /* The live virtual operand on successor edges.  */

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit

/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */

  /* The number of RHS computations eliminated by PRE.  */

  /* The number of new expressions/temporaries generated by PRE.  */

  /* The number of inserts found due to partial anticipation.  */

  /* The number of new PHI nodes added by PRE.  */

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets", 30);
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : free_ptr_hash <expr_pred_trans_d>
  /* The expression.  */

  /* The predecessor block along which we translated the expression.  */

  /* The value that resulted from the translation.  */

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */

  /* hash_table support.  */
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
expr_pred_trans_d::hash (const expr_pred_trans_d *e)

expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
                          const expr_pred_trans_d *ve2)
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */

  return pre_expr_d::equal (ve1->e, ve2->e);

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),

  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
/* Add expression E to the expression set of value id V.  */

add_to_value (unsigned int v, pre_expr e)
  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
      value_expressions.safe_grow_cleared (v + 1);

  set = value_expressions[v];
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
/* Create a new bitmap set and return it.  */

bitmap_set_new (void)
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
/* Return the value id for a PRE expression EXPR.  */

get_expr_value_id (pre_expr expr)
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      id = PRE_EXPR_NARY (expr)->value_id;
      id = PRE_EXPR_REFERENCE (expr)->value_id;
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

sccvn_valnum_from_value_id (unsigned int val)
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
        return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
        return PRE_EXPR_CONSTANT (vexpr);
/* Remove an expression EXPR from a bitmapped set.  */

bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));

bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
  if (allow_constants || !value_id_constant_p (val))
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));

/* Insert an expression EXPR into a bitmapped set.  */

bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);

/* Free memory used up by SET.  */

bitmap_set_free (bitmap_set_t set)
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
/* Generate a topologically ordered array of bitmap set SET.  */

sorted_array_from_bitmap_set (bitmap_set_t set)
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
          if (bitmap_bit_p (&set->expressions, j))
            result.quick_push (expression_for_id (j));
/* Perform bitmapped set operation DEST &= ORIG.  */

bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
      bitmap_clear (&temp);
/* Subtract all values and expressions contained in ORIG from DEST.  */

bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
  bitmap_set_t result = bitmap_set_new ();

  bitmap_and_compl (&result->expressions, &dest->expressions,

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
/* Subtract all the values in bitmap set B from bitmap set A.  */

bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
  bitmap_clear (&temp);
/* Return true if bitmapped set SET contains the value VALUE_ID.  */

bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
  if (value_id_constant_p (value_id))

  if (!set || bitmap_empty_p (&set->expressions))

  return bitmap_bit_p (&set->values, value_id);

bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
  if (value_id_constant_p (lookfor))

  if (!bitmap_set_contains_value (set, lookfor))

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
      if (bitmap_clear_bit (&set->expressions, i))
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
/* Return true if two bitmap sets are equal.  */

bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
  return bitmap_equal_p (&a->values, &b->values);

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
    bitmap_insert_into_set (set, expr);
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
/* Print out EXPR to outfile.  */

print_pre_expr (FILE *outfile, const pre_expr expr)
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);

        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
        for (i = 0; i < nary->length; i++)
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
        fprintf (outfile, "}");

        vn_reference_op_t vro;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        ref->operands.iterate (i, &vro);
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
                fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
                fprintf (outfile, "<");
              print_generic_expr (outfile, vro->op0, 0);
                fprintf (outfile, ",");
                print_generic_expr (outfile, vro->op1, 0);
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
              fprintf (outfile, ">");
            if (i != ref->operands.length () - 1)
              fprintf (outfile, ",");
        fprintf (outfile, "}");
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */

debug_pre_expr (pre_expr e)
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
/* Print out SET to OUTFILE.  */

print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
      const pre_expr expr = expression_for_id (i);
        fprintf (outfile, ", ");
      print_pre_expr (outfile, expr);
      fprintf (outfile, " (%04d)", get_expr_value_id (expr));
  fprintf (outfile, " }\n");
void debug_bitmap_set (bitmap_set_t);

debug_bitmap_set (bitmap_set_t set)
  print_bitmap_set (stderr, set, "debug", 0);

void debug_bitmap_sets_for (basic_block);

debug_bitmap_sets_for (basic_block bb)
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
/* Print out the expressions that have VAL to OUTFILE.  */

print_value_expressions (FILE *outfile, unsigned int val)
  bitmap set = value_expressions[val];
  sprintf (s, "%04d", val);
  x.expressions = *set;
  print_bitmap_set (outfile, &x, s, 0);

debug_value_expressions (unsigned int val)
  print_value_expressions (stderr, val);
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

get_or_alloc_expr_for_constant (tree constant)
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

get_constant_for_value_id (unsigned int v)
  if (value_id_constant_p (v))
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */

get_or_alloc_expr_for (tree t)
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);

      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As they all
         do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
          pre_expr e = pre_expr_pool.allocate ();
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
              pre_expr_pool.remove (e);
              e = expression_for_id (result_id);
          alloc_expression_id (e);
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  */

fully_constant_expression (pre_expr e)
      vn_nary_op_t nary = PRE_EXPR_NARY (e);
      switch (TREE_CODE_CLASS (nary->opcode))
        case tcc_comparison:
            /* We have to go from trees to pre exprs to value ids to
               constants.  */
            tree naryop0 = nary->op[0];
            tree naryop1 = nary->op[1];
            if (!is_gimple_min_invariant (naryop0))
                pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                unsigned int vrep0 = get_expr_value_id (rep0);
                tree const0 = get_constant_for_value_id (vrep0);
                  naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
            if (!is_gimple_min_invariant (naryop1))
                pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                unsigned int vrep1 = get_expr_value_id (rep1);
                tree const1 = get_constant_for_value_id (vrep1);
                  naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
            result = fold_binary (nary->opcode, nary->type,
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);
            /* We might have simplified the expression to a
               SSA_NAME for example from x_1 * 1.  But we cannot
               insert a PHI for x_1 unconditionally as x_1 might
               not be available readily.  */
          if (nary->opcode != REALPART_EXPR
              && nary->opcode != IMAGPART_EXPR
              && nary->opcode != VIEW_CONVERT_EXPR)
            /* We have to go from trees to pre exprs to value ids to
               constants.  */
            tree naryop0 = nary->op[0];
            tree const0, result;
            if (is_gimple_min_invariant (naryop0))
                pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                unsigned int vrep0 = get_expr_value_id (rep0);
                const0 = get_constant_for_value_id (vrep0);
                tree type1 = TREE_TYPE (nary->op[0]);
                const0 = fold_convert (type1, const0);
                result = fold_unary (nary->opcode, nary->type, const0);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

translate_vuse_through_block (vec<vn_reference_op_s> operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
  gimple phi = SSA_NAME_DEF_STMT (vuse);

  if (gimple_bb (phi) != phiblock)

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
        if (gimple_code (phi) == GIMPLE_PHI)
            e = find_edge (block, phiblock);

      bitmap visited = NULL;
      /* Try to find a vuse that dominates this phi node by skipping
         non-clobbering statements.  */
      vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
      BITMAP_FREE (visited);

        /* If we didn't find any, the value ID can't stay the same,
           but return the translated vuse.  */
        *same_valid = false;
        vuse = PHI_ARG_DEF (phi, e->dest_idx);
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
  result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
/* Get the tree type for our PRE expression e.  */

get_expr_type (const pre_expr e)
      return TREE_TYPE (PRE_EXPR_NAME (e));
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
      return PRE_EXPR_REFERENCE (e)->type;
      return PRE_EXPR_NARY (e)->type;
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

get_representative_for (const pre_expr e)
  unsigned int value_id = get_expr_value_id (e);
      return PRE_EXPR_NAME (e);
      return PRE_EXPR_CONSTANT (e);

        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        bitmap exprs = value_expressions[value_id];
        EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
            else if (rep->kind == CONSTANT)
              return PRE_EXPR_CONSTANT (rep);

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  VN_INFO (name)->valnum = name;
  /* ??? For now mark this SSA name for release by SCCVN.  */
  VN_INFO (name)->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                    tree name = get_representative_for (result);
                    newnary->op[i] = name;
                changed |= newnary->op[i] != nary->op[i];

            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = pre_expr_pool.allocate ();
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
                new_val_id = get_next_value_id ();
                value_expressions.safe_grow_cleared (get_max_value_id () + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  get_or_alloc_expression_id (expr);
            add_to_value (new_val_id, expr);
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vec<vn_reference_op_s> operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        vec<vn_reference_op_s> newoperands = vNULL;
        bool changed = false, same_valid = true;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0; operands.iterate (i, &operand); i++)
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
                unsigned int op_val_id;
                if (TREE_CODE (op[n]) != SSA_NAME)
                    /* We can't possibly insert these.  */
                    && !is_gimple_min_invariant (op[n]))
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult != leader)
                    tree name = get_representative_for (opresult);
                    changed |= name != op[n];
                newoperands.release ();
            if (!newoperands.exists ())
              newoperands = operands.copy ();
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newoperands[i] = newop;
        gcc_checking_assert (i == operands.length ());

        newvuse = translate_vuse_through_block (newoperands.exists ()
                                                ? newoperands : operands,
                                                ref->set, ref->type,
                                                vuse, phiblock, pred,
        if (newvuse == NULL_TREE)
            newoperands.release ();

        if (changed || newvuse != vuse)
            unsigned int new_val_id;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      newoperands.exists ()
                                                      ? newoperands : operands,
              newoperands.release ();

            /* We can always insert constants, so if we have a partial
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                  return get_or_alloc_expr_for_constant (tem);

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
                     && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
                newoperands.release ();

            expr = pre_expr_pool.allocate ();
            expr->kind = REFERENCE;

                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
                if (changed || !same_valid)
                    new_val_id = get_next_value_id ();
                    value_expressions.safe_grow_cleared
                      (get_max_value_id () + 1);
                  new_val_id = ref->value_id;
                if (!newoperands.exists ())
                  newoperands = operands.copy ();
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     result, new_val_id);
                newoperands = vNULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  get_or_alloc_expression_id (expr);
            add_to_value (new_val_id, expr);
        newoperands.release ();
        tree name = PRE_EXPR_NAME (expr);
        gimple def_stmt = SSA_NAME_DEF_STMT (name);
        /* If the SSA name is defined by a PHI node in this block,
           translate it.  */
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
            edge e = find_edge (pred, gimple_bb (def_stmt));
            tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            return get_or_alloc_expr_for_name (def);

        /* Otherwise return it unchanged - it will get removed if its
           value is not available in PREDs AVAIL_OUT set of expressions
           by the subtraction of TMP_GEN.  */
/* Wrapper around phi_translate_1 providing caching functionality.  */

phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
  expr_pred_trans_t slot = NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)

  if (value_id_constant_p (get_expr_value_id (expr)))

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
      if (phi_trans_add (&slot, expr, pred))
      /* Store NULL for the value we want to return in the case of
         recursing.  */

  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

        /* Remove failed translations again, they cause insert
           iteration to not pick up new opportunities reliably.  */
        phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
  vec<pre_expr> exprs;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
      bitmap_set_copy (dest, set);

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
        bitmap_value_insert_into_set (dest, translated);
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

bitmap_find_leader (bitmap_set_t set, unsigned int val)
  if (value_id_constant_p (val))
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)

  if (bitmap_set_contains_value (set, val))
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
        return expression_for_id (i);
/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

value_dies_in_block_x (pre_expr expr, basic_block block)
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */

      /* Not a may-def.  */

      /* A load with the same VUSE, we're done.  */
      if (def_vuse == vuse)

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,

      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
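
/* A note on the cache layout used above (restating what the code does,
   not adding new state): EXPR_DIES (block) holds two bits per expression
   id - bit id * 2 is set once the query for that expression has been
   computed in this block, and bit id * 2 + 1 holds the cached answer.
   A hit therefore costs two bitmap tests, a miss one block walk.  */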
/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
  if (op && TREE_CODE (op) == SSA_NAME)
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
            || (set2 && bitmap_set_contains_value (set2, value_id))))

/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
      /* By construction all NAMEs are available.  Non-available
         NAMEs are removed by subtracting TMP_GEN from the sets.  */

        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
          if (!op_valid_in_sets (set1, set2, nary->op[i]))

        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;

        FOR_EACH_VEC_ELT (ref->operands, i, vro)
            if (!op_valid_in_sets (set1, set2, vro->op0)
                || !op_valid_in_sets (set1, set2, vro->op1)
                || !op_valid_in_sets (set1, set2, vro->op2))
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);

  FOR_EACH_VEC_ELT (exprs, i, expr)
      if (!valid_in_sets (set1, set2, expr))
        bitmap_remove_from_set (set1, expr);

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

clean (bitmap_set_t set)
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);

  FOR_EACH_VEC_ELT (exprs, i, expr)
      if (!valid_in_sets (set, NULL, expr))
        bitmap_remove_from_set (set, expr);
/* Clean the set of expressions that are no longer valid in SET because
   they are clobbered in BLOCK or because they trap and may not be executed.  */

prune_clobbered_mems (bitmap_set_t set, basic_block block)
  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
      pre_expr expr = expression_for_id (i);
      if (expr->kind == REFERENCE)
          vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
              gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
              if (!gimple_nop_p (def_stmt)
                  && ((gimple_bb (def_stmt) != block
                       && !dominated_by_p (CDI_DOMINATORS,
                                           block, gimple_bb (def_stmt)))
                      || (gimple_bb (def_stmt) == block
                          && value_dies_in_block_x (expr, block))))
                bitmap_remove_from_set (set, expr);
      else if (expr->kind == NARY)
          vn_nary_op_t nary = PRE_EXPR_NARY (expr);
          /* If the NARY may trap make sure the block does not contain
             a possible exit point.
             ??? This is overly conservative if we translate AVAIL_OUT
             as the available expression might be after the exit point.  */
          if (BB_MAY_NOTRETURN (block)
              && vn_nary_may_trap (nary))
            bitmap_remove_from_set (set, expr);
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;

/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
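
/* A small worked illustration of the equations above (hypothetical
   blocks, not derived from a real testcase): for a diamond

              B1
             /  \
           B2    B3
             \  /
              B4     with a + b computed only in B4,

   a + b is in EXP_GEN[B4], hence in ANTIC_IN[B4].  B2 and B3 each have
   the single successor B4, so ANTIC_OUT[B2] and ANTIC_OUT[B3] are
   phi_translate (ANTIC_IN[B4]), and a + b reaches ANTIC_OUT[B1] as the
   intersection over B1's two successors, assuming a and b already have
   leaders available in B1.  */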
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
      basic_block succ_bb = single_succ (block);
      gcc_assert (BB_VISITED (succ_bb));
      phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
      basic_block bprime, first = NULL;

      auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
              && BB_VISITED (e->dest))
          else if (BB_VISITED (e->dest))
            worklist.quick_push (e->dest);

      /* Of multiple successors we have to have visited one already
         which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);

      FOR_EACH_VEC_ELT (worklist, i, bprime)
          if (!gimple_seq_empty_p (phi_nodes (bprime)))
              bitmap_set_t tmp = bitmap_set_new ();
              phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
              bitmap_set_and (ANTIC_OUT, tmp);
              bitmap_set_free (tmp);
            bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
                                  expression_for_id (bii));

  clean (ANTIC_IN (block));

  if (!bitmap_set_equal (old, ANTIC_IN (block)))
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
        bitmap_set_bit (changed_blocks, e->src->index);
    bitmap_clear_bit (changed_blocks, block->index);

  if (dump_file && (dump_flags & TDF_DETAILS))
        print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

        print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",

        print_bitmap_set (dump_file, S, "S", block->index);

    bitmap_set_free (old);
    bitmap_set_free (S);
    bitmap_set_free (ANTIC_OUT);
2217 /* Compute PARTIAL_ANTIC for BLOCK.
2219 If succs(BLOCK) > 1 then
2220 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2221 in ANTIC_OUT for all succ(BLOCK)
2222 else if succs(BLOCK) == 1 then
2223 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2225 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2230 compute_partial_antic_aux (basic_block block
,
2231 bool block_has_abnormal_pred_edge
)
2233 bool changed
= false;
2234 bitmap_set_t old_PA_IN
;
2235 bitmap_set_t PA_OUT
;
2238 unsigned long max_pa
= PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH
);
2240 old_PA_IN
= PA_OUT
= NULL
;
2242 /* If any edges from predecessors are abnormal, antic_in is empty,
2244 if (block_has_abnormal_pred_edge
)
2245 goto maybe_dump_sets
;
2247 /* If there are too many partially anticipatable values in the
2248 block, phi_translate_set can take an exponential time: stop
2249 before the translation starts. */
2251 && single_succ_p (block
)
2252 && bitmap_count_bits (&PA_IN (single_succ (block
))->values
) > max_pa
)
2253 goto maybe_dump_sets
;
2255 old_PA_IN
= PA_IN (block
);
2256 PA_OUT
= bitmap_set_new ();
2258 /* If the block has no successors, ANTIC_OUT is empty. */
2259 if (EDGE_COUNT (block
->succs
) == 0)
2261 /* If we have one successor, we could have some phi nodes to
2262 translate through. Note that we can't phi translate across DFS
2263 back edges in partial antic, because it uses a union operation on
2264 the successors. For recurrences like IV's, we will end up
2265 generating a new value in the set on each go around (i + 3 (VH.1)
2266 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2267 else if (single_succ_p (block
))
2269 basic_block succ
= single_succ (block
);
2270 if (!(single_succ_edge (block
)->flags
& EDGE_DFS_BACK
))
2271 phi_translate_set (PA_OUT
, PA_IN (succ
), block
, succ
);
2273 /* If we have multiple successors, we take the union of all of
2280 auto_vec
<basic_block
> worklist (EDGE_COUNT (block
->succs
));
2281 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2283 if (e
->flags
& EDGE_DFS_BACK
)
2285 worklist
.quick_push (e
->dest
);
2287 if (worklist
.length () > 0)
2289 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2294 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime
), i
, bi
)
2295 bitmap_value_insert_into_set (PA_OUT
,
2296 expression_for_id (i
));
2297 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2299 bitmap_set_t pa_in
= bitmap_set_new ();
2300 phi_translate_set (pa_in
, PA_IN (bprime
), block
, bprime
);
2301 FOR_EACH_EXPR_ID_IN_SET (pa_in
, i
, bi
)
2302 bitmap_value_insert_into_set (PA_OUT
,
2303 expression_for_id (i
));
2304 bitmap_set_free (pa_in
);
2307 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime
), i
, bi
)
2308 bitmap_value_insert_into_set (PA_OUT
,
2309 expression_for_id (i
));
  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block));
2332 if (!bitmap_set_equal (old_PA_IN
, PA_IN (block
)))
2335 bitmap_set_bit (changed_blocks
, block
->index
);
2336 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2337 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2340 bitmap_clear_bit (changed_blocks
, block
->index
);
2343 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2346 print_bitmap_set (dump_file
, PA_OUT
, "PA_OUT", block
->index
);
2348 print_bitmap_set (dump_file
, PA_IN (block
), "PA_IN", block
->index
);
2351 bitmap_set_free (old_PA_IN
);
2353 bitmap_set_free (PA_OUT
);
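/* Illustrative sketch only, not part of the pass: the PA_IN update above in
   the notation of the PA_OUT/PA_IN comment before this function.

     PA_IN (block) = dependent_clean (((PA_OUT - TMP_GEN (block))
                                       U PHI_GEN (block))
                                      - ANTIC_IN (block))

   i.e. start from PA_OUT minus the locally generated temporaries, put the
   PHI results back (they are safe over back edges, see the comment above),
   drop everything that is already fully anticipatable, and finally clean
   values whose leaders are themselves not partially anticipatable.  */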
2357 /* Compute ANTIC and partial ANTIC sets. */
2360 compute_antic (void)
2362 bool changed
= true;
2363 int num_iterations
= 0;
2367 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2368 We pre-build the map of blocks with incoming abnormal edges here. */
2369 has_abnormal_preds
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
2370 bitmap_clear (has_abnormal_preds
);
2372 FOR_ALL_BB_FN (block
, cfun
)
2377 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2379 e
->flags
&= ~EDGE_DFS_BACK
;
2380 if (e
->flags
& EDGE_ABNORMAL
)
2382 bitmap_set_bit (has_abnormal_preds
, block
->index
);
2387 BB_VISITED (block
) = 0;
2389 /* While we are here, give empty ANTIC_IN sets to each block. */
2390 ANTIC_IN (block
) = bitmap_set_new ();
2391 PA_IN (block
) = bitmap_set_new ();
2394 /* At the exit block we anticipate nothing. */
2395 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun
)) = 1;
2397 changed_blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
) + 1);
2398 bitmap_ones (changed_blocks
);
2401 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2402 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2403 /* ??? We need to clear our PHI translation cache here as the
2404 ANTIC sets shrink and we restrict valid translations to
2405 those having operands with leaders in ANTIC. Same below
2406 for PA ANTIC computation. */
2409 for (i
= postorder_num
- 1; i
>= 0; i
--)
2411 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2413 basic_block block
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
2414 changed
|= compute_antic_aux (block
,
2415 bitmap_bit_p (has_abnormal_preds
,
2419 /* Theoretically possible, but *highly* unlikely. */
2420 gcc_checking_assert (num_iterations
< 500);
2423 statistics_histogram_event (cfun
, "compute_antic iterations",
2426 if (do_partial_partial
)
2428 bitmap_ones (changed_blocks
);
2429 mark_dfs_back_edges ();
2434 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2435 fprintf (dump_file
, "Starting iteration %d\n", num_iterations
);
2438 for (i
= postorder_num
- 1 ; i
>= 0; i
--)
2440 if (bitmap_bit_p (changed_blocks
, postorder
[i
]))
2442 basic_block block
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
2444 |= compute_partial_antic_aux (block
,
2445 bitmap_bit_p (has_abnormal_preds
,
2449 /* Theoretically possible, but *highly* unlikely. */
2450 gcc_checking_assert (num_iterations
< 500);
2452 statistics_histogram_event (cfun
, "compute_partial_antic iterations",
2455 sbitmap_free (has_abnormal_preds
);
2456 sbitmap_free (changed_blocks
);
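/* Illustrative sketch only, not part of the pass: the skeleton of the
   fixpoint loop above.  Blocks are visited in reverse postorder; a block is
   re-examined only while its bit in CHANGED_BLOCKS is set, and whenever its
   ANTIC_IN changes its predecessors are marked again.

     bitmap_ones (changed_blocks);
     while (changed)
       {
         changed = false;
         num_iterations++;
         for (i = postorder_num - 1; i >= 0; i--)
           if (bitmap_bit_p (changed_blocks, postorder[i]))
             changed |= compute_antic_aux (BASIC_BLOCK_FOR_FN (cfun,
                                                               postorder[i]),
                                           ...);
       }

   The same scheme is re-run for the partial-ANTIC sets when
   do_partial_partial is set, using compute_partial_antic_aux instead.  */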
2460 /* Inserted expressions are placed onto this worklist, which is used
2461 for performing quick dead code elimination of insertions we made
2462 that didn't turn out to be necessary. */
static bitmap inserted_exprs;
2465 /* The actual worker for create_component_ref_by_pieces. */
2468 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2469 unsigned int *operand
, gimple_seq
*stmts
)
2471 vn_reference_op_t currop
= &ref
->operands
[*operand
];
2474 switch (currop
->opcode
)
2478 tree folded
, sc
= NULL_TREE
;
2479 unsigned int nargs
= 0;
2481 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2484 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2489 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2493 args
= XNEWVEC (tree
, ref
->operands
.length () - 1);
2494 while (*operand
< ref
->operands
.length ())
2496 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2502 folded
= build_call_array (currop
->type
,
2503 (TREE_CODE (fn
) == FUNCTION_DECL
2504 ? build_fold_addr_expr (fn
) : fn
),
2506 if (currop
->with_bounds
)
2507 CALL_WITH_BOUNDS_P (folded
) = true;
2510 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2516 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2520 tree offset
= currop
->op0
;
2521 if (TREE_CODE (baseop
) == ADDR_EXPR
2522 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2526 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2529 offset
= int_const_binop (PLUS_EXPR
, offset
,
2530 build_int_cst (TREE_TYPE (offset
),
2532 baseop
= build_fold_addr_expr (base
);
2534 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2537 case TARGET_MEM_REF
:
2539 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2540 vn_reference_op_t nextop
= &ref
->operands
[++*operand
];
2541 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2547 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2553 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2557 return build5 (TARGET_MEM_REF
, currop
->type
,
2558 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2564 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2570 case VIEW_CONVERT_EXPR
:
2572 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2576 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2579 case WITH_SIZE_EXPR
:
2581 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2585 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2588 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2593 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2597 tree op1
= currop
->op0
;
2598 tree op2
= currop
->op1
;
2599 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
      /* For array ref vn_reference_op's, operand 1 of the array ref
         is op0 of the reference op and operand 3 of the array ref is
         op1.  */
    case ARRAY_RANGE_REF:
2609 tree genop1
= currop
->op0
;
2610 tree genop2
= currop
->op1
;
2611 tree genop3
= currop
->op2
;
2612 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2616 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2621 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2622 /* Drop zero minimum index if redundant. */
2623 if (integer_zerop (genop2
)
2625 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2629 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2636 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
2637 /* We can't always put a size in units of the element alignment
2638 here as the element alignment may be not visible. See
2639 PR43783. Simply drop the element size for constant
2641 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2645 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2646 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2647 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2652 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2659 tree genop2
= currop
->op1
;
2660 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
        /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves.  */
2667 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2671 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2676 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no-no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                gimple_seq *stmts)
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
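/* Illustrative sketch only, not part of the pass: how the OPERAND index is
   threaded through create_component_ref_by_pieces_1.  Each (possibly
   recursive) call consumes entries of REF->operands starting at *OPERAND
   and advances the index, so the wrapper above only has to seed it with
   zero:

     unsigned int op = 0;
     tree expr = create_component_ref_by_pieces_1 (block, ref, &op, stmts);
     // on return, op is one past the last vn_reference_op consumed

   Only the innermost SSA operands go through find_or_generate_expression;
   the reference itself is rebuilt as a single access expression, as the
   comment before the wrapper explains.  */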
2717 /* Find a simple leader for an expression, or generate one using
2718 create_expression_by_pieces from a NARY expression for the value.
2719 BLOCK is the basic_block we are looking for leaders in.
2720 OP is the tree expression to find a leader for or generate.
2721 Returns the leader or NULL_TREE on failure. */
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);

  if (leader->kind == NAME)
    return PRE_EXPR_NAME (leader);
  else if (leader->kind == CONSTANT)
    return PRE_EXPR_CONSTANT (leader);
2740 /* It must be a complex expression, so generate it recursively. Note
2741 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2742 where the insert algorithm fails to insert a required expression. */
  bitmap exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
         places.  We can insert NARYs which eventually re-materialize
         their operand values.  */
      if (temp->kind == NARY)
        return create_expression_by_pieces (block, temp, stmts,
                                            get_expr_type (expr));
    }
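/* Illustrative sketch only, not part of the pass: the lookup order
   find_or_generate_expression uses when materializing an operand OP in
   BLOCK.

     1. Value-number OP and look for a leader in AVAIL_OUT (block); an SSA
        name or constant leader is returned directly.
     2. Otherwise scan the expressions recorded for that value and
        re-generate a NARY one via create_expression_by_pieces.
     3. REFERENCE expressions are never generated here, so the caller must
        be prepared for a NULL_TREE result.  */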
2761 #define NECESSARY GF_PLF_1
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (i.e. there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */
2780 create_expression_by_pieces (basic_block block
, pre_expr expr
,
2781 gimple_seq
*stmts
, tree type
)
2785 gimple_seq forced_stmts
= NULL
;
2786 unsigned int value_id
;
2787 gimple_stmt_iterator gsi
;
2788 tree exprtype
= type
? type
: get_expr_type (expr
);
2794 /* We may hit the NAME/CONSTANT case if we have to convert types
2795 that value numbering saw through. */
2797 folded
= PRE_EXPR_NAME (expr
);
2800 folded
= PRE_EXPR_CONSTANT (expr
);
2804 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2805 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2812 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2813 tree
*genop
= XALLOCAVEC (tree
, nary
->length
);
2815 for (i
= 0; i
< nary
->length
; ++i
)
2817 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2820 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2821 may have conversions stripped. */
2822 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2825 genop
[i
] = gimple_convert (&forced_stmts
,
2826 nary
->type
, genop
[i
]);
2828 genop
[i
] = gimple_convert (&forced_stmts
,
2829 sizetype
, genop
[i
]);
2832 genop
[i
] = gimple_convert (&forced_stmts
,
2833 TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2835 if (nary
->opcode
== CONSTRUCTOR
)
2837 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
2838 for (i
= 0; i
< nary
->length
; ++i
)
2839 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2840 folded
= build_constructor (nary
->type
, elts
);
2844 switch (nary
->length
)
2847 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2851 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2852 genop
[0], genop
[1]);
2855 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2856 genop
[0], genop
[1], genop
[2]);
2868 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2869 folded
= fold_convert (exprtype
, folded
);
  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
2875 gimple_seq tem
= NULL
;
2876 folded
= force_gimple_operand (unshare_expr (folded
), &tem
,
2878 gimple_seq_add_seq_without_update (&forced_stmts
, tem
);
2880 /* If we have any intermediate expressions to the value sets, add them
2881 to the value sets and chain them in the instruction stream. */
2884 gsi
= gsi_start (forced_stmts
);
2885 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2887 gimple stmt
= gsi_stmt (gsi
);
2888 tree forcedname
= gimple_get_lhs (stmt
);
2891 if (TREE_CODE (forcedname
) == SSA_NAME
)
2893 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2894 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2895 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2896 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2897 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2898 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2899 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2902 gimple_set_vuse (stmt
, BB_LIVE_VOP_ON_EXIT (block
));
2903 gimple_set_modified (stmt
, true);
2905 gimple_seq_add_seq (stmts
, forced_stmts
);
2908 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2909 newstmt
= gimple_build_assign (name
, folded
);
2910 gimple_set_vuse (newstmt
, BB_LIVE_VOP_ON_EXIT (block
));
2911 gimple_set_modified (newstmt
, true);
2912 gimple_set_plf (newstmt
, NECESSARY
, false);
2914 gimple_seq_add_stmt (stmts
, newstmt
);
2915 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2917 /* Fold the last statement. */
2918 gsi
= gsi_last (*stmts
);
2919 if (fold_stmt_inplace (&gsi
))
2920 update_stmt (gsi_stmt (gsi
));
  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
2927 value_id
= get_expr_value_id (expr
);
2928 VN_INFO_GET (name
)->value_id
= value_id
;
2929 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2930 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2931 VN_INFO (name
)->valnum
= name
;
2932 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2933 nameexpr
= get_or_alloc_expr_for_name (name
);
2934 add_to_value (value_id
, nameexpr
);
2935 if (NEW_SETS (block
))
2936 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2937 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2939 pre_stats
.insertions
++;
2940 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2942 fprintf (dump_file
, "Inserted ");
2943 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
2944 fprintf (dump_file
, " in predecessor %d (%04d)\n",
2945 block
->index
, value_id
);
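/* Illustrative sketch only, not part of the pass: the bookkeeping
   create_expression_by_pieces performs for every statement it emits, both
   for forced intermediate statements and for the final "pretmp" assignment.

     bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));  // allow later DCE
     VN_INFO_GET (name)->value_id = value_id;                   // tie name to value
     nameexpr = get_or_alloc_expr_for_name (name);
     add_to_value (value_id, nameexpr);
     bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
     bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

   Keeping NEW_SETS and AVAIL_OUT in sync like this is what lets a later
   insert iteration and the final elimination phase find the new names.  */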
2952 /* Insert the to-be-made-available values of expression EXPRNUM for each
2953 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2954 merge the result with a phi node, given the same value number as
2955 NODE. Return true if we have inserted new stuff. */
2958 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
2959 vec
<pre_expr
> avail
)
2961 pre_expr expr
= expression_for_id (exprnum
);
2963 unsigned int val
= get_expr_value_id (expr
);
2965 bool insertions
= false;
2970 tree type
= get_expr_type (expr
);
2974 /* Make sure we aren't creating an induction variable. */
2975 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
2977 bool firstinsideloop
= false;
2978 bool secondinsideloop
= false;
2979 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2980 EDGE_PRED (block
, 0)->src
);
2981 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
2982 EDGE_PRED (block
, 1)->src
);
2983 /* Induction variables only have one edge inside the loop. */
2984 if ((firstinsideloop
^ secondinsideloop
)
2985 && expr
->kind
!= REFERENCE
)
2987 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2988 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2993 /* Make the necessary insertions. */
2994 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
2996 gimple_seq stmts
= NULL
;
2999 eprime
= avail
[pred
->dest_idx
];
3001 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3003 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3005 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3006 gsi_insert_seq_on_edge (pred
, stmts
);
3009 /* We cannot insert a PHI node if we failed to insert
3014 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (builtexpr
);
3017 else if (eprime
->kind
== CONSTANT
)
3019 /* Constants may not have the right type, fold_convert
3020 should give us back a constant with the right type. */
3021 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3022 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3024 tree builtexpr
= fold_convert (type
, constant
);
3025 if (!is_gimple_min_invariant (builtexpr
))
3027 tree forcedexpr
= force_gimple_operand (builtexpr
,
3030 if (!is_gimple_min_invariant (forcedexpr
))
3032 if (forcedexpr
!= builtexpr
)
3034 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3035 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3039 gimple_stmt_iterator gsi
;
3040 gsi
= gsi_start (stmts
);
3041 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3043 gimple stmt
= gsi_stmt (gsi
);
3044 tree lhs
= gimple_get_lhs (stmt
);
3045 if (TREE_CODE (lhs
) == SSA_NAME
)
3046 bitmap_set_bit (inserted_exprs
,
3047 SSA_NAME_VERSION (lhs
));
3048 gimple_set_plf (stmt
, NECESSARY
, false);
3050 gsi_insert_seq_on_edge (pred
, stmts
);
3052 avail
[pred
->dest_idx
]
3053 = get_or_alloc_expr_for_name (forcedexpr
);
3057 avail
[pred
->dest_idx
]
3058 = get_or_alloc_expr_for_constant (builtexpr
);
3061 else if (eprime
->kind
== NAME
)
          /* We may have to do a conversion because our value
             numbering can look through types in certain cases, but
             our IL requires all operands of a phi node have the same
             type.  */
3067 tree name
= PRE_EXPR_NAME (eprime
);
3068 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3072 builtexpr
= fold_convert (type
, name
);
3073 forcedexpr
= force_gimple_operand (builtexpr
,
3077 if (forcedexpr
!= name
)
3079 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3080 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3085 gimple_stmt_iterator gsi
;
3086 gsi
= gsi_start (stmts
);
3087 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3089 gimple stmt
= gsi_stmt (gsi
);
3090 tree lhs
= gimple_get_lhs (stmt
);
3091 if (TREE_CODE (lhs
) == SSA_NAME
)
3092 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3093 gimple_set_plf (stmt
, NECESSARY
, false);
3095 gsi_insert_seq_on_edge (pred
, stmts
);
3097 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (forcedexpr
);
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;
3110 /* Now build a phi for the new variable. */
3111 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3112 phi
= create_phi_node (temp
, block
);
3114 gimple_set_plf (phi
, NECESSARY
, false);
3115 VN_INFO_GET (temp
)->value_id
= val
;
3116 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3117 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3118 VN_INFO (temp
)->valnum
= temp
;
3119 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3120 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3122 pre_expr ae
= avail
[pred
->dest_idx
];
3123 gcc_assert (get_expr_type (ae
) == type
3124 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3125 if (ae
->kind
== CONSTANT
)
3126 add_phi_arg (phi
, unshare_expr (PRE_EXPR_CONSTANT (ae
)),
3127 pred
, UNKNOWN_LOCATION
);
3129 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3132 newphi
= get_or_alloc_expr_for_name (temp
);
3133 add_to_value (val
, newphi
);
  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     added.

     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.  */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
                               newphi);
  bitmap_insert_into_set (NEW_SETS (block),
                          newphi);
3155 /* If we insert a PHI node for a conversion of another PHI node
3156 in the same basic-block try to preserve range information.
3157 This is important so that followup loop passes receive optimal
3158 number of iteration analysis results. See PR61743. */
3159 if (expr
->kind
== NARY
3160 && CONVERT_EXPR_CODE_P (expr
->u
.nary
->opcode
)
3161 && TREE_CODE (expr
->u
.nary
->op
[0]) == SSA_NAME
3162 && gimple_bb (SSA_NAME_DEF_STMT (expr
->u
.nary
->op
[0])) == block
3163 && INTEGRAL_TYPE_P (type
)
3164 && INTEGRAL_TYPE_P (TREE_TYPE (expr
->u
.nary
->op
[0]))
3165 && (TYPE_PRECISION (type
)
3166 >= TYPE_PRECISION (TREE_TYPE (expr
->u
.nary
->op
[0])))
3167 && SSA_NAME_RANGE_INFO (expr
->u
.nary
->op
[0]))
3170 if (get_range_info (expr
->u
.nary
->op
[0], &min
, &max
) == VR_RANGE
3171 && !wi::neg_p (min
, SIGNED
)
3172 && !wi::neg_p (max
, SIGNED
))
3173 /* Just handle extension and sign-changes of all-positive ranges. */
3174 set_range_info (temp
,
3175 SSA_NAME_RANGE_TYPE (expr
->u
.nary
->op
[0]),
3176 wide_int_storage::from (min
, TYPE_PRECISION (type
),
3178 wide_int_storage::from (max
, TYPE_PRECISION (type
),
3182 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3184 fprintf (dump_file
, "Created phi ");
3185 print_gimple_stmt (dump_file
, phi
, 0, 0);
3186 fprintf (dump_file
, " in block %d (%04d)\n", block
->index
, val
);
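/* Illustrative sketch only, not part of the pass: the shape of the merge
   insert_into_preds_of_block builds once a value VAL is available (or has
   been made available) in every predecessor of BLOCK.

     # in BLOCK
     prephitmp_N = PHI <avail[e0->dest_idx](e0), avail[e1->dest_idx](e1), ...>

   The PHI result is given VAL as its value-id, recorded in inserted_exprs,
   and entered into PHI_GEN, AVAIL_OUT and NEW_SETS of BLOCK, so the rest of
   the pass treats it like any other leader for VAL.  */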
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1. Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
           any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
           the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
           NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_partial_insertion.  */
3213 do_regular_insertion (basic_block block
, basic_block dom
)
3215 bool new_stuff
= false;
3216 vec
<pre_expr
> exprs
;
3218 auto_vec
<pre_expr
> avail
;
3221 exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
3222 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3224 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3226 if (expr
->kind
== NARY
3227 || expr
->kind
== REFERENCE
)
3230 bool by_some
= false;
3231 bool cant_insert
= false;
3232 bool all_same
= true;
3233 pre_expr first_s
= NULL
;
3236 pre_expr eprime
= NULL
;
3238 pre_expr edoubleprime
= NULL
;
3239 bool do_insertion
= false;
3241 val
= get_expr_value_id (expr
);
3242 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3244 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3246 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3248 fprintf (dump_file
, "Found fully redundant value: ");
3249 print_pre_expr (dump_file
, expr
);
3250 fprintf (dump_file
, "\n");
3255 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3257 unsigned int vprime
;
3259 /* We should never run insertion for the exit block
3260 and so not come across fake pred edges. */
3261 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3263 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3266 /* eprime will generally only be NULL if the
3267 value of the expression, translated
3268 through the PHI for this predecessor, is
3269 undefined. If that is the case, we can't
3270 make the expression fully redundant,
3271 because its value is undefined along a
3272 predecessor path. We can thus break out
3273 early because it doesn't matter what the
3274 rest of the results are. */
3277 avail
[pred
->dest_idx
] = NULL
;
3282 eprime
= fully_constant_expression (eprime
);
3283 vprime
= get_expr_value_id (eprime
);
3284 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3286 if (edoubleprime
== NULL
)
3288 avail
[pred
->dest_idx
] = eprime
;
3293 avail
[pred
->dest_idx
] = edoubleprime
;
3295 /* We want to perform insertions to remove a redundancy on
3296 a path in the CFG we want to optimize for speed. */
3297 if (optimize_edge_for_speed_p (pred
))
3298 do_insertion
= true;
3299 if (first_s
== NULL
)
3300 first_s
= edoubleprime
;
3301 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3305 /* If we can insert it, it's not the same value
3306 already existing along every predecessor, and
3307 it's defined by some predecessor, it is
3308 partially redundant. */
3309 if (!cant_insert
&& !all_same
&& by_some
)
3313 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3315 fprintf (dump_file
, "Skipping partial redundancy for "
3317 print_pre_expr (dump_file
, expr
);
3318 fprintf (dump_file
, " (%04d), no redundancy on to be "
3319 "optimized for speed edge\n", val
);
3322 else if (dbg_cnt (treepre_insert
))
3324 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3326 fprintf (dump_file
, "Found partial redundancy for "
3328 print_pre_expr (dump_file
, expr
);
3329 fprintf (dump_file
, " (%04d)\n",
3330 get_expr_value_id (expr
));
3332 if (insert_into_preds_of_block (block
,
3333 get_expression_id (expr
),
3338 /* If all edges produce the same value and that value is
3339 an invariant, then the PHI has the same value on all
3340 edges. Note this. */
3341 else if (!cant_insert
&& all_same
)
3343 gcc_assert (edoubleprime
->kind
== CONSTANT
3344 || edoubleprime
->kind
== NAME
);
3346 tree temp
= make_temp_ssa_name (get_expr_type (expr
),
3349 = gimple_build_assign (temp
,
3350 edoubleprime
->kind
== CONSTANT
?
3351 PRE_EXPR_CONSTANT (edoubleprime
) :
3352 PRE_EXPR_NAME (edoubleprime
));
3353 gimple_stmt_iterator gsi
= gsi_after_labels (block
);
3354 gsi_insert_before (&gsi
, assign
, GSI_NEW_STMT
);
3356 gimple_set_plf (assign
, NECESSARY
, false);
3357 VN_INFO_GET (temp
)->value_id
= val
;
3358 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3359 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3360 VN_INFO (temp
)->valnum
= temp
;
3361 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3362 pre_expr newe
= get_or_alloc_expr_for_name (temp
);
3363 add_to_value (val
, newe
);
3364 bitmap_value_replace_in_set (AVAIL_OUT (block
), newe
);
3365 bitmap_insert_into_set (NEW_SETS (block
), newe
);
/* Perform insertion for partially anticipatable expressions.  There
   is only one case in which we will perform insertion for these.  This
   is when the expression is partially anticipatable and fully available.
   In that case, we know that putting it earlier will enable us to
   remove the later computation.  */
3383 do_partial_partial_insertion (basic_block block
, basic_block dom
)
3385 bool new_stuff
= false;
3386 vec
<pre_expr
> exprs
;
3388 auto_vec
<pre_expr
> avail
;
3391 exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
3392 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3394 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3396 if (expr
->kind
== NARY
3397 || expr
->kind
== REFERENCE
)
3401 bool cant_insert
= false;
3404 pre_expr eprime
= NULL
;
3407 val
= get_expr_value_id (expr
);
3408 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3410 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3413 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3415 unsigned int vprime
;
3416 pre_expr edoubleprime
;
3418 /* We should never run insertion for the exit block
3419 and so not come across fake pred edges. */
3420 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3422 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3426 /* eprime will generally only be NULL if the
3427 value of the expression, translated
3428 through the PHI for this predecessor, is
3429 undefined. If that is the case, we can't
3430 make the expression fully redundant,
3431 because its value is undefined along a
3432 predecessor path. We can thus break out
3433 early because it doesn't matter what the
3434 rest of the results are. */
3437 avail
[pred
->dest_idx
] = NULL
;
3442 eprime
= fully_constant_expression (eprime
);
3443 vprime
= get_expr_value_id (eprime
);
3444 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3445 avail
[pred
->dest_idx
] = edoubleprime
;
3446 if (edoubleprime
== NULL
)
3453 /* If we can insert it, it's not the same value
3454 already existing along every predecessor, and
3455 it's defined by some predecessor, it is
3456 partially redundant. */
3457 if (!cant_insert
&& by_all
)
3460 bool do_insertion
= false;
3462 /* Insert only if we can remove a later expression on a path
3463 that we want to optimize for speed.
3464 The phi node that we will be inserting in BLOCK is not free,
3465 and inserting it for the sake of !optimize_for_speed successor
3466 may cause regressions on the speed path. */
3467 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3469 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
)
3470 || bitmap_set_contains_value (ANTIC_IN (succ
->dest
), val
))
3472 if (optimize_edge_for_speed_p (succ
))
3473 do_insertion
= true;
3479 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3481 fprintf (dump_file
, "Skipping partial partial redundancy "
3483 print_pre_expr (dump_file
, expr
);
3484 fprintf (dump_file
, " (%04d), not (partially) anticipated "
3485 "on any to be optimized for speed edges\n", val
);
3488 else if (dbg_cnt (treepre_insert
))
3490 pre_stats
.pa_insert
++;
3491 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3493 fprintf (dump_file
, "Found partial partial redundancy "
3495 print_pre_expr (dump_file
, expr
);
3496 fprintf (dump_file
, " (%04d)\n",
3497 get_expr_value_id (expr
));
3499 if (insert_into_preds_of_block (block
,
3500 get_expression_id (expr
),
3513 insert_aux (basic_block block
)
3516 bool new_stuff
= false;
3521 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3526 bitmap_set_t newset
= NEW_SETS (dom
);
      /* Note that we need to value_replace both NEW_SETS and
         AVAIL_OUT.  In both sets the value may be represented by
         some non-simple expression here that we want to replace
         with the new one.  */
3533 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3535 pre_expr expr
= expression_for_id (i
);
3536 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3537 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3540 if (!single_pred_p (block
))
3542 new_stuff
|= do_regular_insertion (block
, dom
);
3543 if (do_partial_partial
)
3544 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3548 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3550 son
= next_dom_son (CDI_DOMINATORS
, son
))
3552 new_stuff
|= insert_aux (son
);
3558 /* Perform insertion of partially redundant values. */
3563 bool new_stuff
= true;
3565 int num_iterations
= 0;
3567 FOR_ALL_BB_FN (bb
, cfun
)
3568 NEW_SETS (bb
) = bitmap_set_new ();
3573 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3574 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3575 new_stuff
= insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
  /* Clear the NEW sets before the next iteration.  We have already
     fully propagated their contents.  */
3580 FOR_ALL_BB_FN (bb
, cfun
)
3581 bitmap_set_free (NEW_SETS (bb
));
3583 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
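/* Illustrative sketch only, not part of the pass: the skeleton of the
   insertion driver above.  insert_aux walks the dominator tree from the
   entry block and reports whether it created any new expression or PHI;
   the walk is simply repeated until nothing changes.

     new_stuff = true;
     while (new_stuff)
       {
         num_iterations++;
         new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
         // clear NEW_SETS before the next iteration, as above
       }

   Each round can turn expressions that were only partially redundant into
   fully redundant ones, which is why a fixpoint is needed.  */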
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
3598 compute_avail (void)
3601 basic_block block
, son
;
3602 basic_block
*worklist
;
3606 /* We pretend that default definitions are defined in the entry block.
3607 This includes function arguments and the static chain decl. */
3608 for (i
= 1; i
< num_ssa_names
; ++i
)
3610 tree name
= ssa_name (i
);
3613 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3614 || has_zero_uses (name
)
3615 || virtual_operand_p (name
))
3618 e
= get_or_alloc_expr_for_name (name
);
3619 add_to_value (get_expr_value_id (e
), e
);
3620 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), e
);
3621 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3625 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3627 print_bitmap_set (dump_file
, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3628 "tmp_gen", ENTRY_BLOCK
);
3629 print_bitmap_set (dump_file
, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
3630 "avail_out", ENTRY_BLOCK
);
3633 /* Allocate the worklist. */
3634 worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
));
3636 /* Seed the algorithm by putting the dominator children of the entry
3637 block on the worklist. */
3638 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3640 son
= next_dom_son (CDI_DOMINATORS
, son
))
3641 worklist
[sp
++] = son
;
3643 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3644 = ssa_default_def (cfun
, gimple_vop (cfun
));
3646 /* Loop until the worklist is empty. */
3652 /* Pick a block from the worklist. */
3653 block
= worklist
[--sp
];
3655 /* Initially, the set of available values in BLOCK is that of
3656 its immediate dominator. */
3657 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3660 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3661 BB_LIVE_VOP_ON_EXIT (block
) = BB_LIVE_VOP_ON_EXIT (dom
);
3664 /* Generate values for PHI nodes. */
3665 for (gphi_iterator gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
);
3668 tree result
= gimple_phi_result (gsi
.phi ());
3670 /* We have no need for virtual phis, as they don't represent
3671 actual computations. */
3672 if (virtual_operand_p (result
))
3674 BB_LIVE_VOP_ON_EXIT (block
) = result
;
3678 pre_expr e
= get_or_alloc_expr_for_name (result
);
3679 add_to_value (get_expr_value_id (e
), e
);
3680 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3681 bitmap_insert_into_set (PHI_GEN (block
), e
);
3684 BB_MAY_NOTRETURN (block
) = 0;
3686 /* Now compute value numbers and populate value sets with all
3687 the expressions computed in BLOCK. */
3688 for (gimple_stmt_iterator gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
);
3694 stmt
= gsi_stmt (gsi
);
          /* Cache whether the basic-block has any non-visible side-effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
3700 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
3702 /* Non-looping const functions always return normally.
3703 Otherwise the call might not return or have side-effects
3704 that forbids hoisting possibly trapping expressions
3706 int flags
= gimple_call_flags (stmt
);
3707 if (!(flags
& ECF_CONST
)
3708 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3709 BB_MAY_NOTRETURN (block
) = 1;
3712 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3714 pre_expr e
= get_or_alloc_expr_for_name (op
);
3716 add_to_value (get_expr_value_id (e
), e
);
3717 bitmap_insert_into_set (TMP_GEN (block
), e
);
3718 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3721 if (gimple_vdef (stmt
))
3722 BB_LIVE_VOP_ON_EXIT (block
) = gimple_vdef (stmt
);
3724 if (gimple_has_side_effects (stmt
)
3725 || stmt_could_throw_p (stmt
)
3726 || is_gimple_debug (stmt
))
3729 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3731 if (ssa_undefined_value_p (op
))
3733 pre_expr e
= get_or_alloc_expr_for_name (op
);
3734 bitmap_value_insert_into_set (EXP_GEN (block
), e
);
3737 switch (gimple_code (stmt
))
3745 vn_reference_s ref1
;
3746 pre_expr result
= NULL
;
3748 /* We can value number only calls to real functions. */
3749 if (gimple_call_internal_p (stmt
))
3752 vn_reference_lookup_call (as_a
<gcall
*> (stmt
), &ref
, &ref1
);
            /* If the value of the call is not invalidated in
               this block until it is computed, add the expression
               to EXP_GEN.  */
            if (!gimple_vuse (stmt)
                || gimple_code
                     (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
                || gimple_bb (SSA_NAME_DEF_STMT
                                (gimple_vuse (stmt))) != block)
3765 result
= pre_expr_pool
.allocate ();
3766 result
->kind
= REFERENCE
;
3768 PRE_EXPR_REFERENCE (result
) = ref
;
3770 get_or_alloc_expression_id (result
);
3771 add_to_value (get_expr_value_id (result
), result
);
3772 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3779 pre_expr result
= NULL
;
3780 switch (vn_get_stmt_kind (stmt
))
3784 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3787 /* COND_EXPR and VEC_COND_EXPR are awkward in
3788 that they contain an embedded complex expression.
3789 Don't even try to shove those through PRE. */
3790 if (code
== COND_EXPR
3791 || code
== VEC_COND_EXPR
)
3794 vn_nary_op_lookup_stmt (stmt
, &nary
);
3798 /* If the NARY traps and there was a preceding
3799 point in the block that might not return avoid
3800 adding the nary to EXP_GEN. */
3801 if (BB_MAY_NOTRETURN (block
)
3802 && vn_nary_may_trap (nary
))
3805 result
= pre_expr_pool
.allocate ();
3806 result
->kind
= NARY
;
3808 PRE_EXPR_NARY (result
) = nary
;
3815 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
              /* If the value of the reference is not invalidated in
                 this block until it is computed, add the expression
                 to EXP_GEN.  */
              if (gimple_vuse (stmt))
3828 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3829 while (!gimple_nop_p (def_stmt
)
3830 && gimple_code (def_stmt
) != GIMPLE_PHI
3831 && gimple_bb (def_stmt
) == block
)
3833 if (stmt_may_clobber_ref_p
3834 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3840 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3846 result
= pre_expr_pool
.allocate ();
3847 result
->kind
= REFERENCE
;
3849 PRE_EXPR_REFERENCE (result
) = ref
;
3857 get_or_alloc_expression_id (result
);
3858 add_to_value (get_expr_value_id (result
), result
);
3859 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3867 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3869 print_bitmap_set (dump_file
, EXP_GEN (block
),
3870 "exp_gen", block
->index
);
3871 print_bitmap_set (dump_file
, PHI_GEN (block
),
3872 "phi_gen", block
->index
);
3873 print_bitmap_set (dump_file
, TMP_GEN (block
),
3874 "tmp_gen", block
->index
);
3875 print_bitmap_set (dump_file
, AVAIL_OUT (block
),
3876 "avail_out", block
->index
);
3879 /* Put the dominator children of BLOCK on the worklist of blocks
3880 to compute available sets for. */
3881 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3883 son
= next_dom_son (CDI_DOMINATORS
, son
))
3884 worklist
[sp
++] = son
;
3891 /* Local state for the eliminate domwalk. */
3892 static vec
<gimple
> el_to_remove
;
3893 static vec
<gimple
> el_to_fixup
;
3894 static unsigned int el_todo
;
3895 static vec
<tree
> el_avail
;
3896 static vec
<tree
> el_avail_stack
;
3898 /* Return a leader for OP that is available at the current point of the
3899 eliminate domwalk. */
3902 eliminate_avail (tree op
)
3904 tree valnum
= VN_INFO (op
)->valnum
;
3905 if (TREE_CODE (valnum
) == SSA_NAME
)
3907 if (SSA_NAME_IS_DEFAULT_DEF (valnum
))
3909 if (el_avail
.length () > SSA_NAME_VERSION (valnum
))
3910 return el_avail
[SSA_NAME_VERSION (valnum
)];
3912 else if (is_gimple_min_invariant (valnum
))
3917 /* At the current point of the eliminate domwalk make OP available. */
3920 eliminate_push_avail (tree op
)
3922 tree valnum
= VN_INFO (op
)->valnum
;
3923 if (TREE_CODE (valnum
) == SSA_NAME
)
3925 if (el_avail
.length () <= SSA_NAME_VERSION (valnum
))
3926 el_avail
.safe_grow_cleared (SSA_NAME_VERSION (valnum
) + 1);
3928 if (el_avail
[SSA_NAME_VERSION (valnum
)])
3929 pushop
= el_avail
[SSA_NAME_VERSION (valnum
)];
3930 el_avail_stack
.safe_push (pushop
);
3931 el_avail
[SSA_NAME_VERSION (valnum
)] = op
;
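/* Illustrative sketch only, not part of the pass: how the two helpers above
   give scoped availability during the elimination dominator walk.  el_avail
   is indexed by the SSA version of the value number, so all names with the
   same value share one slot; el_avail_stack records what has to be undone
   when leaving a block.

     el_avail_stack.safe_push (NULL_TREE);   // block marker, before_dom_children
     ...
     eliminate_push_avail (def);             // saves the old leader (or DEF itself
                                             // if the slot was empty) and installs
                                             // DEF as the current leader
     ...
     while ((entry = el_avail_stack.pop ()) != NULL_TREE)
       ...                                   // after_dom_children restores or
                                             // clears the slot for each entry

   The effect is that eliminate_avail only ever returns a leader whose
   definition dominates the current block.  */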
3935 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3936 the leader for the expression if insertion was successful. */
3939 eliminate_insert (gimple_stmt_iterator
*gsi
, tree val
)
3941 tree expr
= vn_get_expr_for (val
);
3942 if (!CONVERT_EXPR_P (expr
)
3943 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
)
3946 tree op
= TREE_OPERAND (expr
, 0);
3947 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (op
) : op
;
3951 tree res
= make_temp_ssa_name (TREE_TYPE (val
), NULL
, "pretmp");
3952 gassign
*tem
= gimple_build_assign (res
,
3953 fold_build1 (TREE_CODE (expr
),
3954 TREE_TYPE (expr
), leader
));
3955 gsi_insert_before (gsi
, tem
, GSI_SAME_STMT
);
3956 VN_INFO_GET (res
)->valnum
= val
;
3958 if (TREE_CODE (leader
) == SSA_NAME
)
3959 gimple_set_plf (SSA_NAME_DEF_STMT (leader
), NECESSARY
, true);
3961 pre_stats
.insertions
++;
3962 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3964 fprintf (dump_file
, "Inserted ");
3965 print_gimple_stmt (dump_file
, tem
, 0, 0);
3971 class eliminate_dom_walker
: public dom_walker
3974 eliminate_dom_walker (cdi_direction direction
, bool do_pre_
)
3975 : dom_walker (direction
), do_pre (do_pre_
) {}
3977 virtual void before_dom_children (basic_block
);
3978 virtual void after_dom_children (basic_block
);
3983 /* Perform elimination for the basic-block B during the domwalk. */
3986 eliminate_dom_walker::before_dom_children (basic_block b
)
3989 el_avail_stack
.safe_push (NULL_TREE
);
3991 /* ??? If we do nothing for unreachable blocks then this will confuse
3992 tailmerging. Eventually we can reduce its reliance on SCCVN now
3993 that we fully copy/constant-propagate (most) things. */
3995 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
3997 gphi
*phi
= gsi
.phi ();
3998 tree res
= PHI_RESULT (phi
);
4000 if (virtual_operand_p (res
))
4006 tree sprime
= eliminate_avail (res
);
4010 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4012 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4013 print_generic_expr (dump_file
, res
, 0);
4014 fprintf (dump_file
, " with ");
4015 print_generic_expr (dump_file
, sprime
, 0);
4016 fprintf (dump_file
, "\n");
4019 /* If we inserted this PHI node ourself, it's not an elimination. */
4021 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4024 pre_stats
.eliminations
++;
4026 /* If we will propagate into all uses don't bother to do
4028 if (may_propagate_copy (res
, sprime
))
4030 /* Mark the PHI for removal. */
4031 el_to_remove
.safe_push (phi
);
4036 remove_phi_node (&gsi
, false);
4039 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4040 && TREE_CODE (sprime
) == SSA_NAME
)
4041 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4043 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4044 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4045 gimple stmt
= gimple_build_assign (res
, sprime
);
4046 /* ??? It cannot yet be necessary (DOM walk). */
4047 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4049 gimple_stmt_iterator gsi2
= gsi_after_labels (b
);
4050 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4054 eliminate_push_avail (res
);
4058 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
);
4062 tree sprime
= NULL_TREE
;
4063 gimple stmt
= gsi_stmt (gsi
);
4064 tree lhs
= gimple_get_lhs (stmt
);
4065 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
4066 && !gimple_has_volatile_ops (stmt
)
          /* See PR43491.  Do not replace a global register variable when
             it is the RHS of an assignment.  Do replace local register
             variables since gcc does not guarantee a local variable will
             be allocated in a register.
             ??? The fix isn't effective here.  This should instead
             be ensured by not value-numbering them the same but treating
             them like volatiles?  */
4074 && !(gimple_assign_single_p (stmt
)
4075 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == VAR_DECL
4076 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
))
4077 && is_global_var (gimple_assign_rhs1 (stmt
)))))
4079 sprime
= eliminate_avail (lhs
);
4082 /* If there is no existing usable leader but SCCVN thinks
4083 it has an expression it wants to use as replacement,
4085 tree val
= VN_INFO (lhs
)->valnum
;
4087 && TREE_CODE (val
) == SSA_NAME
4088 && VN_INFO (val
)->needs_insertion
4089 && VN_INFO (val
)->expr
!= NULL_TREE
4090 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4091 eliminate_push_avail (sprime
);
          /* If this now constitutes a copy, duplicate points-to
             and range info appropriately.  This is especially
             important for inserted code.  See tree-ssa-copy.c
             for similar code.  */
4099 && TREE_CODE (sprime
) == SSA_NAME
)
4101 basic_block sprime_b
= gimple_bb (SSA_NAME_DEF_STMT (sprime
));
4102 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
4103 && SSA_NAME_PTR_INFO (lhs
)
4104 && !SSA_NAME_PTR_INFO (sprime
))
4106 duplicate_ssa_name_ptr_info (sprime
,
4107 SSA_NAME_PTR_INFO (lhs
));
4109 mark_ptr_info_alignment_unknown
4110 (SSA_NAME_PTR_INFO (sprime
));
4112 else if (!POINTER_TYPE_P (TREE_TYPE (lhs
))
4113 && SSA_NAME_RANGE_INFO (lhs
)
4114 && !SSA_NAME_RANGE_INFO (sprime
)
4116 duplicate_ssa_name_range_info (sprime
,
4117 SSA_NAME_RANGE_TYPE (lhs
),
4118 SSA_NAME_RANGE_INFO (lhs
));
          /* Inhibit the use of an inserted PHI on a loop header when
             the address of the memory reference is a simple induction
             variable.  In other cases the vectorizer won't do anything
             anyway (either it's loop invariant or a complicated
             expression).  */
4127 && TREE_CODE (sprime
) == SSA_NAME
4129 && flag_tree_loop_vectorize
4130 && loop_outer (b
->loop_father
)
4131 && has_zero_uses (sprime
)
4132 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))
4133 && gimple_assign_load_p (stmt
))
4135 gimple def_stmt
= SSA_NAME_DEF_STMT (sprime
);
4136 basic_block def_bb
= gimple_bb (def_stmt
);
4137 if (gimple_code (def_stmt
) == GIMPLE_PHI
4138 && b
->loop_father
->header
== def_bb
)
4143 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
4146 def_bb
= gimple_bb (SSA_NAME_DEF_STMT (op
));
4148 && flow_bb_inside_loop_p (b
->loop_father
, def_bb
)
4149 && simple_iv (b
->loop_father
,
4150 b
->loop_father
, op
, &iv
, true))
4158 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4160 fprintf (dump_file
, "Not replacing ");
4161 print_gimple_expr (dump_file
, stmt
, 0, 0);
4162 fprintf (dump_file
, " with ");
4163 print_generic_expr (dump_file
, sprime
, 0);
4164 fprintf (dump_file
, " which would add a loop"
4165 " carried dependence to loop %d\n",
4166 b
->loop_father
->num
);
4168 /* Don't keep sprime available. */
4176 /* If we can propagate the value computed for LHS into
4177 all uses don't bother doing anything with this stmt. */
4178 if (may_propagate_copy (lhs
, sprime
))
4180 /* Mark it for removal. */
4181 el_to_remove
.safe_push (stmt
);
4183 /* ??? Don't count copy/constant propagations. */
4184 if (gimple_assign_single_p (stmt
)
4185 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4186 || gimple_assign_rhs1 (stmt
) == sprime
))
4189 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4191 fprintf (dump_file
, "Replaced ");
4192 print_gimple_expr (dump_file
, stmt
, 0, 0);
4193 fprintf (dump_file
, " with ");
4194 print_generic_expr (dump_file
, sprime
, 0);
4195 fprintf (dump_file
, " in all uses of ");
4196 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4199 pre_stats
.eliminations
++;
4203 /* If this is an assignment from our leader (which
4204 happens in the case the value-number is a constant)
4205 then there is nothing to do. */
4206 if (gimple_assign_single_p (stmt
)
4207 && sprime
== gimple_assign_rhs1 (stmt
))
4210 /* Else replace its RHS. */
4211 bool can_make_abnormal_goto
4212 = is_gimple_call (stmt
)
4213 && stmt_can_make_abnormal_goto (stmt
);
4215 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4217 fprintf (dump_file
, "Replaced ");
4218 print_gimple_expr (dump_file
, stmt
, 0, 0);
4219 fprintf (dump_file
, " with ");
4220 print_generic_expr (dump_file
, sprime
, 0);
4221 fprintf (dump_file
, " in ");
4222 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4225 if (TREE_CODE (sprime
) == SSA_NAME
)
4226 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4229 pre_stats
.eliminations
++;
4230 gimple orig_stmt
= stmt
;
4231 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4232 TREE_TYPE (sprime
)))
4233 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4234 tree vdef
= gimple_vdef (stmt
);
4235 tree vuse
= gimple_vuse (stmt
);
4236 propagate_tree_value_into_stmt (&gsi
, sprime
);
4237 stmt
= gsi_stmt (gsi
);
4239 if (vdef
!= gimple_vdef (stmt
))
4240 VN_INFO (vdef
)->valnum
= vuse
;
4242 /* If we removed EH side-effects from the statement, clean
4243 its EH information. */
4244 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4246 bitmap_set_bit (need_eh_cleanup
,
4247 gimple_bb (stmt
)->index
);
4248 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4249 fprintf (dump_file
, " Removed EH side-effects.\n");
4252 /* Likewise for AB side-effects. */
4253 if (can_make_abnormal_goto
4254 && !stmt_can_make_abnormal_goto (stmt
))
4256 bitmap_set_bit (need_ab_cleanup
,
4257 gimple_bb (stmt
)->index
);
4258 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4259 fprintf (dump_file
, " Removed AB side-effects.\n");
      /* If the statement is a scalar store, see if the expression
         has the same value number as its rhs.  If so, the store is
         dead.  */
4269 if (gimple_assign_single_p (stmt
)
4270 && !gimple_has_volatile_ops (stmt
)
4271 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4272 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
4273 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))))
4276 tree rhs
= gimple_assign_rhs1 (stmt
);
4277 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4278 gimple_vuse (stmt
), VN_WALK
, NULL
);
4279 if (TREE_CODE (rhs
) == SSA_NAME
)
4280 rhs
= VN_INFO (rhs
)->valnum
;
4282 && operand_equal_p (val
, rhs
, 0))
4284 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4286 fprintf (dump_file
, "Deleted redundant store ");
4287 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4290 /* Queue stmt for removal. */
4291 el_to_remove
.safe_push (stmt
);
      /* If this is a control statement value numbering left edges
         unexecuted on, force the condition in a way consistent with
         that.  */
4299 if (gcond
*cond
= dyn_cast
<gcond
*> (stmt
))
4301 if ((EDGE_SUCC (b
, 0)->flags
& EDGE_EXECUTABLE
)
4302 ^ (EDGE_SUCC (b
, 1)->flags
& EDGE_EXECUTABLE
))
4304 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4306 fprintf (dump_file
, "Removing unexecutable edge from ");
4307 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4309 if (((EDGE_SUCC (b
, 0)->flags
& EDGE_TRUE_VALUE
) != 0)
4310 == ((EDGE_SUCC (b
, 0)->flags
& EDGE_EXECUTABLE
) != 0))
4311 gimple_cond_make_true (cond
);
4313 gimple_cond_make_false (cond
);
4315 el_todo
|= TODO_cleanup_cfg
;
4320 bool can_make_abnormal_goto
= stmt_can_make_abnormal_goto (stmt
);
4321 bool was_noreturn
= (is_gimple_call (stmt
)
4322 && gimple_call_noreturn_p (stmt
));
4323 tree vdef
= gimple_vdef (stmt
);
4324 tree vuse
= gimple_vuse (stmt
);
      /* If we didn't replace the whole stmt (or propagate the result
         into all uses), replace all uses on this stmt with their
         leaders.  */
      use_operand_p use_p;
4331 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4333 tree use
= USE_FROM_PTR (use_p
);
4334 /* ??? The call code above leaves stmt operands un-updated. */
4335 if (TREE_CODE (use
) != SSA_NAME
)
4337 tree sprime
= eliminate_avail (use
);
4338 if (sprime
&& sprime
!= use
4339 && may_propagate_copy (use
, sprime
)
4340 /* We substitute into debug stmts to avoid excessive
4341 debug temporaries created by removed stmts, but we need
4342 to avoid doing so for inserted sprimes as we never want
4343 to create debug temporaries for them. */
4345 || TREE_CODE (sprime
) != SSA_NAME
4346 || !is_gimple_debug (stmt
)
4347 || !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))))
4349 propagate_value (use_p
, sprime
);
4350 gimple_set_modified (stmt
, true);
4351 if (TREE_CODE (sprime
) == SSA_NAME
4352 && !is_gimple_debug (stmt
))
4353 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4358 /* Visit indirect calls and turn them into direct calls if
4359 possible using the devirtualization machinery. */
4360 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
4362 tree fn
= gimple_call_fn (call_stmt
);
4364 && flag_devirtualize
4365 && virtual_method_call_p (fn
))
4367 tree otr_type
= obj_type_ref_class (fn
);
4369 ipa_polymorphic_call_context
context (current_function_decl
, fn
, stmt
, &instance
);
4372 context
.get_dynamic_type (instance
, OBJ_TYPE_REF_OBJECT (fn
), otr_type
, stmt
);
4374 vec
<cgraph_node
*>targets
4375 = possible_polymorphic_call_targets (obj_type_ref_class (fn
),
4377 (OBJ_TYPE_REF_TOKEN (fn
)),
4381 dump_possible_polymorphic_call_targets (dump_file
,
4382 obj_type_ref_class (fn
),
4384 (OBJ_TYPE_REF_TOKEN (fn
)),
4386 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4389 if (targets
.length () == 1)
4390 fn
= targets
[0]->decl
;
4392 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4393 if (dump_enabled_p ())
4395 location_t loc
= gimple_location_safe (stmt
);
4396 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
4397 "converting indirect call to "
4399 cgraph_node::get (fn
)->name ());
4401 gimple_call_set_fndecl (call_stmt
, fn
);
4402 maybe_remove_unused_call_args (cfun
, call_stmt
);
4403 gimple_set_modified (stmt
, true);
4408 if (gimple_modified_p (stmt
))
4410 /* If a formerly non-invariant ADDR_EXPR is turned into an
4411 invariant one it was on a separate stmt. */
4412 if (gimple_assign_single_p (stmt
)
4413 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
4414 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
4415 gimple old_stmt
= stmt
;
4416 if (is_gimple_call (stmt
))
          /* ??? Only fold calls inplace for now, this may create new
             SSA names which in turn will confuse free_scc_vn SSA name
             bookkeeping.  */
4421 fold_stmt_inplace (&gsi
);
4422 /* When changing a call into a noreturn call, cfg cleanup
4423 is needed to fix up the noreturn call. */
4424 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4425 el_to_fixup
.safe_push (stmt
);
4430 stmt
= gsi_stmt (gsi
);
4431 if ((gimple_code (stmt
) == GIMPLE_COND
4432 && (gimple_cond_true_p (as_a
<gcond
*> (stmt
))
4433 || gimple_cond_false_p (as_a
<gcond
*> (stmt
))))
4434 || (gimple_code (stmt
) == GIMPLE_SWITCH
4435 && TREE_CODE (gimple_switch_index (
4436 as_a
<gswitch
*> (stmt
)))
4438 el_todo
|= TODO_cleanup_cfg
;
4440 /* If we removed EH side-effects from the statement, clean
4441 its EH information. */
4442 if (maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
))
4444 bitmap_set_bit (need_eh_cleanup
,
4445 gimple_bb (stmt
)->index
);
4446 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4447 fprintf (dump_file
, " Removed EH side-effects.\n");
4449 /* Likewise for AB side-effects. */
4450 if (can_make_abnormal_goto
4451 && !stmt_can_make_abnormal_goto (stmt
))
4453 bitmap_set_bit (need_ab_cleanup
,
4454 gimple_bb (stmt
)->index
);
4455 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4456 fprintf (dump_file
, " Removed AB side-effects.\n");
4459 if (vdef
!= gimple_vdef (stmt
))
4460 VN_INFO (vdef
)->valnum
= vuse
;
4463 /* Make new values available - for fully redundant LHS we
4464 continue with the next stmt above and skip this. */
4466 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_DEF
)
4467 eliminate_push_avail (DEF_FROM_PTR (defp
));
  /* Replace destination PHI arguments.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    {
	      propagate_value (use_p, sprime);
	      if (TREE_CODE (sprime) == SSA_NAME)
		gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
	    }
	}
    }
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
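
/* Illustration only, not part of the pass: the el_avail / el_avail_stack
   pair set up by eliminate () and unwound in after_dom_children above is a
   dominator-scoped leader table - leaders recorded while walking a block's
   subtree are undone when the walk leaves that subtree.  Below is a minimal
   standalone sketch of the same push/unwind idiom; it is a simplification
   (it saves explicit (value, old-leader) pairs instead of the single-entry
   encoding used by the pass) and every name in it is made up for the
   example.  */
#if 0
#include <stdio.h>

#define MAX_VALUES 16
#define STACK_SIZE 64
#define SCOPE_MARK -1

static int avail[MAX_VALUES];	/* current leader per value id, 0 = none */
static int stack[STACK_SIZE];	/* saved slots, SCOPE_MARK delimits scopes */
static int stack_top;

/* Start a new scope (entering a basic block in the dominator walk).  */
static void
enter_scope (void)
{
  stack[stack_top++] = SCOPE_MARK;
}

/* Record LEADER for VALUE_ID, remembering what it shadows.  */
static void
push_avail (int value_id, int leader)
{
  stack[stack_top++] = value_id;
  stack[stack_top++] = avail[value_id];
  avail[value_id] = leader;
}

/* Leave the scope, restoring every slot changed since enter_scope.  */
static void
leave_scope (void)
{
  while (stack[stack_top - 1] != SCOPE_MARK)
    {
      int old = stack[--stack_top];
      int value_id = stack[--stack_top];
      avail[value_id] = old;
    }
  --stack_top;		/* pop the scope marker */
}

int
main (void)
{
  enter_scope ();
  push_avail (3, 42);			  /* leader valid inside this scope */
  printf ("in scope: %d\n", avail[3]);	  /* prints 42 */
  leave_scope ();
  printf ("after scope: %d\n", avail[3]); /* prints 0 again */
  return 0;
}
#endif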
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_fixup.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
			do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();
  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
	lhs = gimple_phi_result (stmt);
      else
	lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
	  && TREE_CODE (lhs) == SSA_NAME)
	bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();
  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el_to_fixup.is_empty ())
    {
      stmt = el_to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }
  el_to_fixup.release ();

  return el_todo;
}
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
	bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  unsigned k;

	  for (k = 0; k < gimple_phi_num_args (t); k++)
	    {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  gimple n = mark_operand_necessary (arg);
		  if (n)
		    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      gimple n = mark_operand_necessary (use);
	      if (n)
		bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	    }
	}
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
	{
	  gimple_stmt_iterator gsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_gimple_stmt (dump_file, t, 0, 0);
	    }

	  gsi = gsi_for_stmt (t);
	  if (gimple_code (t) == GIMPLE_PHI)
	    remove_phi_node (&gsi, true);
	  else
	    {
	      gsi_remove (&gsi, true);
	      release_defs (t);
	    }
	}
    }
  BITMAP_FREE (worklist);
}
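
/* Illustration only, not part of the pass: mark_operand_necessary and
   remove_dead_inserted_code above form a small worklist-driven DCE - seed
   with the insertions already known to be needed, transitively mark every
   definition they use, and finally delete whatever never became necessary.
   Below is a minimal standalone sketch of that worklist scheme over an
   artificial def-use table; everything in it is made up for the example and
   none of it is GCC API.  */
#if 0
#include <stdio.h>
#include <string.h>

#define N_DEFS 6
#define MAX_USES 2

/* uses[d] lists the definitions that definition d reads (-1 = no use).  */
static const int uses[N_DEFS][MAX_USES] = {
  { -1, -1 },	/* 0: constant */
  {  0, -1 },	/* 1: uses 0 */
  {  1, -1 },	/* 2: uses 1 */
  { -1, -1 },	/* 3: dead insertion */
  {  3, -1 },	/* 4: only feeds the dead chain */
  {  2, -1 },	/* 5: root that is really needed */
};

int
main (void)
{
  int necessary[N_DEFS];
  int worklist[N_DEFS];
  int n_work = 0;

  memset (necessary, 0, sizeof necessary);

  /* Seed: definition 5 is known to be needed (say, it feeds a store).  */
  necessary[5] = 1;
  worklist[n_work++] = 5;

  /* Propagate necessity backwards through the use edges.  */
  while (n_work > 0)
    {
      int d = worklist[--n_work];
      for (int k = 0; k < MAX_USES; k++)
	{
	  int u = uses[d][k];
	  if (u >= 0 && !necessary[u])
	    {
	      necessary[u] = 1;
	      worklist[n_work++] = u;
	    }
	}
    }

  /* Anything still unmarked is dead and would be removed.  */
  for (int d = 0; d < N_DEFS; d++)
    if (!necessary[d])
      printf ("would remove definition %d\n", d);	/* prints 3 and 4 */
  return 0;
}
#endif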
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table <expr_pred_trans_d> (5110);
  expression_to_id = new hash_table <pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  bitmap_set_pool.release ();
  pre_expr_pool.release ();
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre
unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Elimination folds statements, which might (but should not...) end up
     not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}