/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */

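/* As a purely illustrative sketch (the basic-block numbers and SSA
   names below are made up, not taken from any real dump), consider a
   diamond where a_1 + b_2 is computed on only one incoming path:

	 bb2: x_3 = a_1 + b_2;      bb3: <no computation>
	       \                        /
		\                      /
		 bb4: y_4 = a_1 + b_2;

   The value of a_1 + b_2 is in ANTIC_IN of bb4 and in AVAIL_OUT of
   bb2 but not of bb3, so it is partially redundant.  Insertion adds
   x_5 = a_1 + b_2 at the end of bb3 and a PHI
   x_6 = PHI <x_3(bb2), x_5(bb3)> in bb4; elimination then replaces
   the computation of y_4 with x_6, making it fully redundant.  */
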
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

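/* For instance (an invented example, not real pass output): if SCCVN
   determines that x_1 and y_2 compute the same value, both names are
   associated with one small value_id, say 7, and the dataflow bitmaps
   below only ever need to set bit 7 rather than materialize another
   SSA_NAME for that value.  */
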
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */

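/* A small made-up example of the representation: a set containing
   expression id 17 (whose value_id is 5) and expression id 23 (whose
   value_id is 9) is stored as

	 expressions = { 17, 23 }    values = { 5, 9 }

   so testing value membership and iterating over the expressions in
   the set are each a single bitmap operation.  */
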
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d T;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const struct pre_expr_d *e1, const struct pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const struct pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static hash_table <pre_expr_d> expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      unsigned old_len = VEC_length (unsigned, name_to_id);
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the expression id of EXPR if one has been assigned,
   or 0 if it has none.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* True if we are running as part of FRE rather than full PRE.  */
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static VEC(bitmap, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)  ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)     ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)  ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call

/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d T;
  static inline hashval_t hash (const expr_pred_trans_d *);
  static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
                          const expr_pred_trans_d *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table <expr_pred_trans_d> phi_translate_table;

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e), pred->index);
  slot = phi_translate_table.find_slot_with_hash (&ept, ept.hashcode,
                                                  NO_INSERT);
  if (!slot)
    return NULL;
  else
    return (*slot)->v;
}

/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_d::hash (e),
                                                 pred->index);

  slot = phi_translate_table.find_slot_with_hash (new_pair,
                                                  new_pair->hashcode, INSERT);
  free (*slot);
  *slot = new_pair;
}

/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap, heap, value_expressions, v + 1);
    }

  set = VEC_index (bitmap, value_expressions, v);
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      VEC_replace (bitmap, value_expressions, v, set);
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}

/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap exprset = VEC_index (bitmap, value_expressions, i);
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}

/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap, value_expressions, lookfor);
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}

void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  if (!in_fre)
    {
      print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
      print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
      print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
      print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
      if (do_partial_partial)
        print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
      print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
    }
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = VEC_index (bitmap, value_expressions, val);
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}

DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = VEC_index (bitmap, value_expressions, v);

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As they all
         do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
      return NULL;
    }
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          unsigned int cnt;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap exprs = VEC_index (bitmap, value_expressions, value_id);
        EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                /* Make sure we do not recursively translate ourselves
                   like for translating a[n_1] with the leader for
                   n_1 being a[n_1].  */
                if (get_expression_id (leader) != get_expression_id (expr))
                  {
                    opresult = phi_translate (leader, set1, set2,
                                              pred, phiblock);
                    if (!opresult)
                      break;
                    if (opresult != leader)
                      {
                        tree name = get_representative_for (opresult);
                        if (!name)
                          break;
                        changed |= name != op[n];
                        op[n] = name;
                      }
                  }
              }
            if (n != 3)
              {
                if (newoperands)
                  VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op[0]) == INTEGER_CST
                && TREE_CODE (op[1]) == INTEGER_CST
                && TREE_CODE (op[2]) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op[0]);
                off += -tree_to_double_int (op[1]);
                off *= tree_to_double_int (op[2]);
                if (off.fits_shwi ())
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1).opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, VN_WALK);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            /* We can always insert constants, so if we have a partially
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}

/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = VEC_index (bitmap, value_expressions, val);

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = VEC_index (bitmap, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  /* PRE insertions are at the end of the basic-block
                     and have UID 0.  */
                  && (gimple_uid (def_stmt) == 0
                      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
                continue;
            }
          return val;
        }
    }
  return NULL;
}

/* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
   BLOCK by seeing if it is not killed in the block.  Note that we are
   only determining whether there is a store that kills it.  Because
   of the order in which clean iterates over values, we are guaranteed
   that altered operands will have caused us to be eliminated from the
   ANTIC_IN set already.  */

static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting the statement walk from
     the top of the basic block, a statement uses VUSE, there can be no
     kill in between that use and the original statement that loaded
     {e, VUSE}, so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (gsi);
      def_vuse = gimple_vuse (def);
      def_vdef = gimple_vdef (def);

      /* Not a memory statement.  */
      if (!def_vuse)
        continue;

      /* Not a may-def.  */
      if (!def_vdef)
        {
          /* A load with the same VUSE, we're done.  */
          if (def_vuse == vuse)
            break;

          continue;
        }

      /* Init ref only if we really need it.  */
      if (ref.base == NULL_TREE
          && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
                                             refx->operands))
        {
          res = true;
          break;
        }
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
        {
          res = true;
          break;
        }
    }

  /* Remember the result.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}

/* Determine if OP is valid in SET1 U SET2, which it is when the union
   contains its value-id.  */

static bool
op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
{
  if (op && TREE_CODE (op) == SSA_NAME)
    {
      unsigned int value_id = VN_INFO (op)->value_id;
      if (!(bitmap_set_contains_value (set1, value_id)
            || (set2 && bitmap_set_contains_value (set2, value_id))))
        return false;
    }
  return true;
}

/* Determine if the expression EXPR is valid in SET1 U SET2.
   ONLY SET2 CAN BE NULL.
   This means that we have a leader for each part of the expression
   (if it consists of values), or the expression is an SSA_NAME.
   For loads/calls, we also see if the vuse is killed in this block.  */

static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
               basic_block block)
{
  switch (expr->kind)
    {
    case NAME:
      return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
          if (!op_valid_in_sets (set1, set2, nary->op[i]))
            return false;
        return true;
      }
      break;
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vn_reference_op_t vro;
        unsigned int i;

        FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
          {
            if (!op_valid_in_sets (set1, set2, vro->op0)
                || !op_valid_in_sets (set1, set2, vro->op1)
                || !op_valid_in_sets (set1, set2, vro->op2))
              return false;
          }
        return true;
      }
    default:
      gcc_unreachable ();
    }
}

/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN.  */

static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr, block))
        bitmap_remove_from_set (set1, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */

static void
clean (bitmap_set_t set, basic_block block)
{
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      if (!valid_in_sets (set, NULL, expr, block))
        bitmap_remove_from_set (set, expr);
    }
  VEC_free (pre_expr, heap, exprs);
}

2078 /* Clean the set of expressions that are no longer valid in SET because
2079 they are clobbered in BLOCK or because they trap and may not be executed. */
2081 static void
2082 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2084 bitmap_iterator bi;
2085 unsigned i;
2087 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2089 pre_expr expr = expression_for_id (i);
2090 if (expr->kind == REFERENCE)
2092 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2093 if (ref->vuse)
2095 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2096 if (!gimple_nop_p (def_stmt)
2097 && ((gimple_bb (def_stmt) != block
2098 && !dominated_by_p (CDI_DOMINATORS,
2099 block, gimple_bb (def_stmt)))
2100 || (gimple_bb (def_stmt) == block
2101 && value_dies_in_block_x (expr, block))))
2102 bitmap_remove_from_set (set, expr);
2105 else if (expr->kind == NARY)
2107 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2108 /* If the NARY may trap make sure the block does not contain
2109 a possible exit point.
2110 ??? This is overly conservative if we translate AVAIL_OUT
2111 as the available expression might be after the exit point. */
2112 if (BB_MAY_NOTRETURN (block)
2113 && vn_nary_may_trap (nary))
2114 bitmap_remove_from_set (set, expr);
2119 static sbitmap has_abnormal_preds;
2121 /* List of blocks that may have changed during ANTIC computation and
2122 thus need to be iterated over. */
2124 static sbitmap changed_blocks;
2126 /* Decide whether to defer a block for a later iteration, or PHI
2127 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2128 should defer the block, and true if we processed it. */
2130 static bool
2131 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2132 basic_block block, basic_block phiblock)
2134 if (!BB_VISITED (phiblock))
2136 SET_BIT (changed_blocks, block->index);
2137 BB_VISITED (block) = 0;
2138 BB_DEFERRED (block) = 1;
2139 return false;
2141 else
2142 phi_translate_set (dest, source, block, phiblock);
2143 return true;
2146 /* Compute the ANTIC set for BLOCK.
2148 If succs(BLOCK) > 1 then
2149 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2150 else if succs(BLOCK) == 1 then
2151 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2153 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
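/* A worked illustration of the equations above (example only; assume
   a and b are live on entry and not redefined below B1):

           B1
          /  \
        B2    B3
          \  /
           B4   where B4 computes a + b

   a + b is in EXP_GEN[B4] and therefore in ANTIC_IN[B4].  B2 and B3
   each have the single successor B4, so ANTIC_OUT[B2] and ANTIC_OUT[B3]
   are phi translations of ANTIC_IN[B4], and a + b becomes anticipatable
   at the top of both arms; the intersection over B1's two successors
   then makes it anticipatable at the top of B1 as well.  */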
2156 static bool
2157 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2159 bool changed = false;
2160 bitmap_set_t S, old, ANTIC_OUT;
2161 bitmap_iterator bi;
2162 unsigned int bii;
2163 edge e;
2164 edge_iterator ei;
2166 old = ANTIC_OUT = S = NULL;
2167 BB_VISITED (block) = 1;
2169 /* If any edges from predecessors are abnormal, antic_in is empty,
2170 so do nothing. */
2171 if (block_has_abnormal_pred_edge)
2172 goto maybe_dump_sets;
2174 old = ANTIC_IN (block);
2175 ANTIC_OUT = bitmap_set_new ();
2177 /* If the block has no successors, ANTIC_OUT is empty. */
2178 if (EDGE_COUNT (block->succs) == 0)
2180 /* If we have one successor, we could have some phi nodes to
2181 translate through. */
2182 else if (single_succ_p (block))
2184 basic_block succ_bb = single_succ (block);
2186 /* We trade iterations of the dataflow equations for having to
2187 phi translate the maximal set, which is incredibly slow
2188 (since the maximal set often has 300+ members, even when you
2189 have a small number of blocks).
2190 Basically, we defer the computation of ANTIC for this block
2191       until we have processed its successor, which will inevitably
2192 have a *much* smaller set of values to phi translate once
2193 clean has been run on it.
2194       The cost of doing this is that we technically perform more
2195       iterations; however, they are lower-cost iterations.
2197 Timings for PRE on tramp3d-v4:
2198 without maximal set fix: 11 seconds
2199 with maximal set fix/without deferring: 26 seconds
2200 with maximal set fix/with deferring: 11 seconds
2203 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2204 block, succ_bb))
2206 changed = true;
2207 goto maybe_dump_sets;
2210 /* If we have multiple successors, we take the intersection of all of
2211 them. Note that in the case of loop exit phi nodes, we may have
2212 phis to translate through. */
2213 else
2215 VEC(basic_block, heap) * worklist;
2216 size_t i;
2217 basic_block bprime, first = NULL;
2219 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2220 FOR_EACH_EDGE (e, ei, block->succs)
2222 if (!first
2223 && BB_VISITED (e->dest))
2224 first = e->dest;
2225 else if (BB_VISITED (e->dest))
2226 VEC_quick_push (basic_block, worklist, e->dest);
2229 /* Of multiple successors we have to have visited one already. */
2230 if (!first)
2232 SET_BIT (changed_blocks, block->index);
2233 BB_VISITED (block) = 0;
2234 BB_DEFERRED (block) = 1;
2235 changed = true;
2236 VEC_free (basic_block, heap, worklist);
2237 goto maybe_dump_sets;
2240 if (!gimple_seq_empty_p (phi_nodes (first)))
2241 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2242 else
2243 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2245 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2247 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2249 bitmap_set_t tmp = bitmap_set_new ();
2250 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2251 bitmap_set_and (ANTIC_OUT, tmp);
2252 bitmap_set_free (tmp);
2254 else
2255 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2257 VEC_free (basic_block, heap, worklist);
2260 /* Prune expressions that are clobbered in block and thus become
2261 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2262 prune_clobbered_mems (ANTIC_OUT, block);
2264 /* Generate ANTIC_OUT - TMP_GEN. */
2265 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2267 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2268 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2269 TMP_GEN (block));
2271 /* Then union in the ANTIC_OUT - TMP_GEN values,
2272 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2273 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2274 bitmap_value_insert_into_set (ANTIC_IN (block),
2275 expression_for_id (bii));
2277 clean (ANTIC_IN (block), block);
2279 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2281 changed = true;
2282 SET_BIT (changed_blocks, block->index);
2283 FOR_EACH_EDGE (e, ei, block->preds)
2284 SET_BIT (changed_blocks, e->src->index);
2286 else
2287 RESET_BIT (changed_blocks, block->index);
2289 maybe_dump_sets:
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2292 if (!BB_DEFERRED (block) || BB_VISITED (block))
2294 if (ANTIC_OUT)
2295 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2297 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2298 block->index);
2300 if (S)
2301 print_bitmap_set (dump_file, S, "S", block->index);
2303 else
2305 fprintf (dump_file,
2306 "Block %d was deferred for a future iteration.\n",
2307 block->index);
2310 if (old)
2311 bitmap_set_free (old);
2312 if (S)
2313 bitmap_set_free (S);
2314 if (ANTIC_OUT)
2315 bitmap_set_free (ANTIC_OUT);
2316 return changed;
2319 /* Compute PARTIAL_ANTIC for BLOCK.
2321 If succs(BLOCK) > 1 then
2322 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2323 in ANTIC_OUT for all succ(BLOCK)
2324 else if succs(BLOCK) == 1 then
2325 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2327 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2328 - ANTIC_IN[BLOCK])
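/* Contrast with full ANTIC above (illustration only): ANTIC_OUT
   intersects the successors' sets, so an expression must be computable
   along every path leaving BLOCK, whereas PA_OUT unions them, so it
   suffices that some path below BLOCK computes the value.  E.g. if only
   one arm of a conditional below BLOCK computes a + b, then a + b is in
   PA_OUT[BLOCK] but not in ANTIC_OUT[BLOCK].  */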
2331 static bool
2332 compute_partial_antic_aux (basic_block block,
2333 bool block_has_abnormal_pred_edge)
2335 bool changed = false;
2336 bitmap_set_t old_PA_IN;
2337 bitmap_set_t PA_OUT;
2338 edge e;
2339 edge_iterator ei;
2340 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2342 old_PA_IN = PA_OUT = NULL;
2344 /* If any edges from predecessors are abnormal, antic_in is empty,
2345 so do nothing. */
2346 if (block_has_abnormal_pred_edge)
2347 goto maybe_dump_sets;
2349 /* If there are too many partially anticipatable values in the
2350 block, phi_translate_set can take an exponential time: stop
2351 before the translation starts. */
2352 if (max_pa
2353 && single_succ_p (block)
2354 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2355 goto maybe_dump_sets;
2357 old_PA_IN = PA_IN (block);
2358 PA_OUT = bitmap_set_new ();
2360   /* If the block has no successors, PA_OUT is empty.  */
2361 if (EDGE_COUNT (block->succs) == 0)
2363 /* If we have one successor, we could have some phi nodes to
2364 translate through. Note that we can't phi translate across DFS
2365 back edges in partial antic, because it uses a union operation on
2366      the successors.  For recurrences like IVs, we will end up
2367      generating a new value in the set on each go-around (i + 3 (VH.1),
2368      VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
2369 else if (single_succ_p (block))
2371 basic_block succ = single_succ (block);
2372 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2373 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2375 /* If we have multiple successors, we take the union of all of
2376 them. */
2377 else
2379 VEC(basic_block, heap) * worklist;
2380 size_t i;
2381 basic_block bprime;
2383 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2384 FOR_EACH_EDGE (e, ei, block->succs)
2386 if (e->flags & EDGE_DFS_BACK)
2387 continue;
2388 VEC_quick_push (basic_block, worklist, e->dest);
2390 if (VEC_length (basic_block, worklist) > 0)
2392 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2394 unsigned int i;
2395 bitmap_iterator bi;
2397 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2398 bitmap_value_insert_into_set (PA_OUT,
2399 expression_for_id (i));
2400 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2402 bitmap_set_t pa_in = bitmap_set_new ();
2403 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2404 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2405 bitmap_value_insert_into_set (PA_OUT,
2406 expression_for_id (i));
2407 bitmap_set_free (pa_in);
2409 else
2410 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2411 bitmap_value_insert_into_set (PA_OUT,
2412 expression_for_id (i));
2415 VEC_free (basic_block, heap, worklist);
2418 /* Prune expressions that are clobbered in block and thus become
2419 invalid if translated from PA_OUT to PA_IN. */
2420 prune_clobbered_mems (PA_OUT, block);
2422 /* PA_IN starts with PA_OUT - TMP_GEN.
2423 Then we subtract things from ANTIC_IN. */
2424 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2426 /* For partial antic, we want to put back in the phi results, since
2427 we will properly avoid making them partially antic over backedges. */
2428 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2429 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2431 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2432 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2434 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2436 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2438 changed = true;
2439 SET_BIT (changed_blocks, block->index);
2440 FOR_EACH_EDGE (e, ei, block->preds)
2441 SET_BIT (changed_blocks, e->src->index);
2443 else
2444 RESET_BIT (changed_blocks, block->index);
2446 maybe_dump_sets:
2447 if (dump_file && (dump_flags & TDF_DETAILS))
2449 if (PA_OUT)
2450 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2452 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2454 if (old_PA_IN)
2455 bitmap_set_free (old_PA_IN);
2456 if (PA_OUT)
2457 bitmap_set_free (PA_OUT);
2458 return changed;
2461 /* Compute ANTIC and partial ANTIC sets. */
2463 static void
2464 compute_antic (void)
2466 bool changed = true;
2467 int num_iterations = 0;
2468 basic_block block;
2469 int i;
2471 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2472 We pre-build the map of blocks with incoming abnormal edges here. */
2473 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2474 sbitmap_zero (has_abnormal_preds);
2476 FOR_EACH_BB (block)
2478 edge_iterator ei;
2479 edge e;
2481 FOR_EACH_EDGE (e, ei, block->preds)
2483 e->flags &= ~EDGE_DFS_BACK;
2484 if (e->flags & EDGE_ABNORMAL)
2486 SET_BIT (has_abnormal_preds, block->index);
2487 break;
2491 BB_VISITED (block) = 0;
2492 BB_DEFERRED (block) = 0;
2494 /* While we are here, give empty ANTIC_IN sets to each block. */
2495 ANTIC_IN (block) = bitmap_set_new ();
2496 PA_IN (block) = bitmap_set_new ();
2499 /* At the exit block we anticipate nothing. */
2500 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2501 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2502 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2504 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2505 sbitmap_ones (changed_blocks);
2506 while (changed)
2508 if (dump_file && (dump_flags & TDF_DETAILS))
2509 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2510 /* ??? We need to clear our PHI translation cache here as the
2511 ANTIC sets shrink and we restrict valid translations to
2512 those having operands with leaders in ANTIC. Same below
2513 for PA ANTIC computation. */
2514 num_iterations++;
2515 changed = false;
2516 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2518 if (TEST_BIT (changed_blocks, postorder[i]))
2520 basic_block block = BASIC_BLOCK (postorder[i]);
2521 changed |= compute_antic_aux (block,
2522 TEST_BIT (has_abnormal_preds,
2523 block->index));
2526 /* Theoretically possible, but *highly* unlikely. */
2527 gcc_checking_assert (num_iterations < 500);
2530 statistics_histogram_event (cfun, "compute_antic iterations",
2531 num_iterations);
2533 if (do_partial_partial)
2535 sbitmap_ones (changed_blocks);
2536 mark_dfs_back_edges ();
2537 num_iterations = 0;
2538 changed = true;
2539 while (changed)
2541 if (dump_file && (dump_flags & TDF_DETAILS))
2542 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2543 num_iterations++;
2544 changed = false;
2545 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2547 if (TEST_BIT (changed_blocks, postorder[i]))
2549 basic_block block = BASIC_BLOCK (postorder[i]);
2550 changed
2551 |= compute_partial_antic_aux (block,
2552 TEST_BIT (has_abnormal_preds,
2553 block->index));
2556 /* Theoretically possible, but *highly* unlikely. */
2557 gcc_checking_assert (num_iterations < 500);
2559 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2560 num_iterations);
2562 sbitmap_free (has_abnormal_preds);
2563 sbitmap_free (changed_blocks);
2566 /* Return true if OP is a tree which we can perform PRE on.
2567 This may not match the operations we can value number, but in
2568 a perfect world would. */
2570 static bool
2571 can_PRE_operation (tree op)
2573 return UNARY_CLASS_P (op)
2574 || BINARY_CLASS_P (op)
2575 || COMPARISON_CLASS_P (op)
2576 || TREE_CODE (op) == MEM_REF
2577 || TREE_CODE (op) == COMPONENT_REF
2578 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2579 || TREE_CODE (op) == CALL_EXPR
2580 || TREE_CODE (op) == ARRAY_REF;
2584 /* Inserted expressions are recorded in this bitmap, which is used
2585    for performing quick dead code elimination of insertions we made
2586    that didn't turn out to be necessary.  */
2587 static bitmap inserted_exprs;
2589 /* The actual worker for create_component_ref_by_pieces. */
2591 static tree
2592 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2593 unsigned int *operand, gimple_seq *stmts,
2594 gimple domstmt)
2596 vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
2597 *operand);
2598 tree genop;
2599 ++*operand;
2600 switch (currop->opcode)
2602 case CALL_EXPR:
2604 tree folded, sc = NULL_TREE;
2605 unsigned int nargs = 0;
2606 tree fn, *args;
2607 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2608 fn = currop->op0;
2609 else
2611 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2612 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2613 if (!fn)
2614 return NULL_TREE;
2616 if (currop->op1)
2618 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2619 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2620 if (!sc)
2621 return NULL_TREE;
2623 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2624 ref->operands) - 1);
2625 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2627 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2628 operand, stmts,
2629 domstmt);
2630 if (!args[nargs])
2632 free (args);
2633 return NULL_TREE;
2635 nargs++;
2637 folded = build_call_array (currop->type,
2638 (TREE_CODE (fn) == FUNCTION_DECL
2639 ? build_fold_addr_expr (fn) : fn),
2640 nargs, args);
2641 free (args);
2642 if (sc)
2643 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2644 return folded;
2647 case MEM_REF:
2649 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2650 stmts, domstmt);
2651 tree offset = currop->op0;
2652 if (!baseop)
2653 return NULL_TREE;
2654 if (TREE_CODE (baseop) == ADDR_EXPR
2655 && handled_component_p (TREE_OPERAND (baseop, 0)))
2657 HOST_WIDE_INT off;
2658 tree base;
2659 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2660 &off);
2661 gcc_assert (base);
2662 offset = int_const_binop (PLUS_EXPR, offset,
2663 build_int_cst (TREE_TYPE (offset),
2664 off));
2665 baseop = build_fold_addr_expr (base);
2667 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2670 case TARGET_MEM_REF:
2672 pre_expr op0expr, op1expr;
2673 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2674 vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
2675 ++*operand);
2676 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2677 stmts, domstmt);
2678 if (!baseop)
2679 return NULL_TREE;
2680 if (currop->op0)
2682 op0expr = get_or_alloc_expr_for (currop->op0);
2683 genop0 = find_or_generate_expression (block, op0expr,
2684 stmts, domstmt);
2685 if (!genop0)
2686 return NULL_TREE;
2688 if (nextop->op0)
2690 op1expr = get_or_alloc_expr_for (nextop->op0);
2691 genop1 = find_or_generate_expression (block, op1expr,
2692 stmts, domstmt);
2693 if (!genop1)
2694 return NULL_TREE;
2696 return build5 (TARGET_MEM_REF, currop->type,
2697 baseop, currop->op2, genop0, currop->op1, genop1);
2700 case ADDR_EXPR:
2701 if (currop->op0)
2703 gcc_assert (is_gimple_min_invariant (currop->op0));
2704 return currop->op0;
2706 /* Fallthrough. */
2707 case REALPART_EXPR:
2708 case IMAGPART_EXPR:
2709 case VIEW_CONVERT_EXPR:
2711 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2712 operand,
2713 stmts, domstmt);
2714 if (!genop0)
2715 return NULL_TREE;
2717 return fold_build1 (currop->opcode, currop->type, genop0);
2720 case WITH_SIZE_EXPR:
2722 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2723 stmts, domstmt);
2724 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2725 tree genop1;
2727 if (!genop0)
2728 return NULL_TREE;
2730 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2731 if (!genop1)
2732 return NULL_TREE;
2734 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2737 case BIT_FIELD_REF:
2739 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2740 stmts, domstmt);
2741 tree op1 = currop->op0;
2742 tree op2 = currop->op1;
2744 if (!genop0)
2745 return NULL_TREE;
2747 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2750 /* For array ref vn_reference_op's, operand 1 of the array ref
2751 is op0 of the reference op and operand 3 of the array ref is
2752 op1. */
2753 case ARRAY_RANGE_REF:
2754 case ARRAY_REF:
2756 tree genop0;
2757 tree genop1 = currop->op0;
2758 pre_expr op1expr;
2759 tree genop2 = currop->op1;
2760 pre_expr op2expr;
2761 tree genop3 = currop->op2;
2762 pre_expr op3expr;
2763 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2764 stmts, domstmt);
2765 if (!genop0)
2766 return NULL_TREE;
2767 op1expr = get_or_alloc_expr_for (genop1);
2768 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2769 if (!genop1)
2770 return NULL_TREE;
2771 if (genop2)
2773 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2774 /* Drop zero minimum index if redundant. */
2775 if (integer_zerop (genop2)
2776 && (!domain_type
2777 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2778 genop2 = NULL_TREE;
2779 else
2781 op2expr = get_or_alloc_expr_for (genop2);
2782 genop2 = find_or_generate_expression (block, op2expr, stmts,
2783 domstmt);
2784 if (!genop2)
2785 return NULL_TREE;
2788 if (genop3)
2790 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2791 /* We can't always put a size in units of the element alignment
2792 here as the element alignment may be not visible. See
2793 PR43783. Simply drop the element size for constant
2794 sizes. */
2795 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2796 genop3 = NULL_TREE;
2797 else
2799 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2800 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2801 op3expr = get_or_alloc_expr_for (genop3);
2802 genop3 = find_or_generate_expression (block, op3expr, stmts,
2803 domstmt);
2804 if (!genop3)
2805 return NULL_TREE;
2808 return build4 (currop->opcode, currop->type, genop0, genop1,
2809 genop2, genop3);
2811 case COMPONENT_REF:
2813 tree op0;
2814 tree op1;
2815 tree genop2 = currop->op1;
2816 pre_expr op2expr;
2817 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2818 stmts, domstmt);
2819 if (!op0)
2820 return NULL_TREE;
2821 /* op1 should be a FIELD_DECL, which are represented by
2822 themselves. */
2823 op1 = currop->op0;
2824 if (genop2)
2826 op2expr = get_or_alloc_expr_for (genop2);
2827 genop2 = find_or_generate_expression (block, op2expr, stmts,
2828 domstmt);
2829 if (!genop2)
2830 return NULL_TREE;
2833 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2836 case SSA_NAME:
2838 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2839 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2840 return genop;
2842 case STRING_CST:
2843 case INTEGER_CST:
2844 case COMPLEX_CST:
2845 case VECTOR_CST:
2846 case REAL_CST:
2847 case CONSTRUCTOR:
2848 case VAR_DECL:
2849 case PARM_DECL:
2850 case CONST_DECL:
2851 case RESULT_DECL:
2852 case FUNCTION_DECL:
2853 return currop->op0;
2855 default:
2856 gcc_unreachable ();
2860 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2861    COMPONENT_REF, MEM_REF or ARRAY_REF portion, because we'd end up
2862    trying to rename aggregates into SSA form directly, which is a no-no.
2864 Thus, this routine doesn't create temporaries, it just builds a
2865 single access expression for the array, calling
2866 find_or_generate_expression to build the innermost pieces.
2868 This function is a subroutine of create_expression_by_pieces, and
2869    should not be called on its own unless you really know what you
2870 are doing. */
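/* An illustrative sketch of the operand layout this walks (example
   only; it mirrors how SCCVN flattens references, outermost piece
   first): a load from  x.f[i]  is represented roughly as

     ref->operands = { ARRAY_REF     <op0 = i>,
                       COMPONENT_REF <op0 = FIELD_DECL f>,
                       VAR_DECL      <op0 = x> }

   so create_component_ref_by_pieces_1 consumes the ARRAY_REF entry,
   recurses to rebuild the base  x.f, and generates the index  i  via
   find_or_generate_expression.  */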
2872 static tree
2873 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2874 gimple_seq *stmts, gimple domstmt)
2876 unsigned int op = 0;
2877 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2880 /* Find a leader for an expression, or generate one using
2881 create_expression_by_pieces if it's ANTIC but
2882 complex.
2883 BLOCK is the basic_block we are looking for leaders in.
2884 EXPR is the expression to find a leader or generate for.
2885 STMTS is the statement list to put the inserted expressions on.
2886 Returns the SSA_NAME of the LHS of the generated expression or the
2887 leader.
2888 DOMSTMT if non-NULL is a statement that should be dominated by
2889 all uses in the generated expression. If DOMSTMT is non-NULL this
2890 routine can fail and return NULL_TREE. Otherwise it will assert
2891 on failure. */
2893 static tree
2894 find_or_generate_expression (basic_block block, pre_expr expr,
2895 gimple_seq *stmts, gimple domstmt)
2897 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2898 get_expr_value_id (expr), domstmt);
2899 tree genop = NULL;
2900 if (leader)
2902 if (leader->kind == NAME)
2903 genop = PRE_EXPR_NAME (leader);
2904 else if (leader->kind == CONSTANT)
2905 genop = PRE_EXPR_CONSTANT (leader);
2908 /* If it's still NULL, it must be a complex expression, so generate
2909 it recursively. Not so if inserting expressions for values generated
2910 by SCCVN. */
2911 if (genop == NULL
2912 && !domstmt)
2914 bitmap exprset;
2915 unsigned int lookfor = get_expr_value_id (expr);
2916 bool handled = false;
2917 bitmap_iterator bi;
2918 unsigned int i;
2920 exprset = VEC_index (bitmap, value_expressions, lookfor);
2921 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2923 pre_expr temp = expression_for_id (i);
2924 if (temp->kind != NAME)
2926 handled = true;
2927 genop = create_expression_by_pieces (block, temp, stmts,
2928 domstmt,
2929 get_expr_type (expr));
2930 break;
2933 if (!handled && domstmt)
2934 return NULL_TREE;
2936 gcc_assert (handled);
2938 return genop;
2941 #define NECESSARY GF_PLF_1
2943 /* Create an expression in pieces, so that we can handle very complex
2944    expressions that may be ANTIC, but not necessarily GIMPLE.
2945 BLOCK is the basic block the expression will be inserted into,
2946 EXPR is the expression to insert (in value form)
2947 STMTS is a statement list to append the necessary insertions into.
2949 This function will die if we hit some value that shouldn't be
2950    ANTIC but is (i.e. there is no leader for it or its components).
2951 This function may also generate expressions that are themselves
2952 partially or fully redundant. Those that are will be either made
2953 fully redundant during the next iteration of insert (for partially
2954 redundant ones), or eliminated by eliminate (for fully redundant
2955 ones).
2957 If DOMSTMT is non-NULL then we make sure that all uses in the
2958 expressions dominate that statement. In this case the function
2959 can return NULL_TREE to signal failure. */
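/* Sketch of the flow for a simple binary NARY, say  a + b  (example
   only): find_or_generate_expression yields an SSA name or constant
   for each operand, fold_build2 builds the tree  a + b,
   force_gimple_operand flattens any intermediate expressions into
   forced_stmts (each intermediate result is value numbered and added
   to the sets), and finally a statement of the form

     pretmp_N = a + b;

   is appended to STMTS, with the value id of EXPR recorded as the
   value id of the new temporary.  */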
2961 static tree
2962 create_expression_by_pieces (basic_block block, pre_expr expr,
2963 gimple_seq *stmts, gimple domstmt, tree type)
2965 tree name;
2966 tree folded;
2967 gimple_seq forced_stmts = NULL;
2968 unsigned int value_id;
2969 gimple_stmt_iterator gsi;
2970 tree exprtype = type ? type : get_expr_type (expr);
2971 pre_expr nameexpr;
2972 gimple newstmt;
2974 switch (expr->kind)
2976 /* We may hit the NAME/CONSTANT case if we have to convert types
2977 that value numbering saw through. */
2978 case NAME:
2979 folded = PRE_EXPR_NAME (expr);
2980 break;
2981 case CONSTANT:
2982 folded = PRE_EXPR_CONSTANT (expr);
2983 break;
2984 case REFERENCE:
2986 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2987 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
2989 break;
2990 case NARY:
2992 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2993 tree genop[4];
2994 unsigned i;
2995 for (i = 0; i < nary->length; ++i)
2997 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
2998 genop[i] = find_or_generate_expression (block, op,
2999 stmts, domstmt);
3000 if (!genop[i])
3001 return NULL_TREE;
3002 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3003 may have conversions stripped. */
3004 if (nary->opcode == POINTER_PLUS_EXPR)
3006 if (i == 0)
3007 genop[i] = fold_convert (nary->type, genop[i]);
3008 else if (i == 1)
3009 genop[i] = convert_to_ptrofftype (genop[i]);
3011 else
3012 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3014 if (nary->opcode == CONSTRUCTOR)
3016 VEC(constructor_elt,gc) *elts = NULL;
3017 for (i = 0; i < nary->length; ++i)
3018 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3019 folded = build_constructor (nary->type, elts);
3021 else
3023 switch (nary->length)
3025 case 1:
3026 folded = fold_build1 (nary->opcode, nary->type,
3027 genop[0]);
3028 break;
3029 case 2:
3030 folded = fold_build2 (nary->opcode, nary->type,
3031 genop[0], genop[1]);
3032 break;
3033 case 3:
3034 folded = fold_build3 (nary->opcode, nary->type,
3035 				      genop[0], genop[1], genop[2]);
3036 break;
3037 default:
3038 gcc_unreachable ();
3042 break;
3043 default:
3044 return NULL_TREE;
3047 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3048 folded = fold_convert (exprtype, folded);
3050 /* Force the generated expression to be a sequence of GIMPLE
3051 statements.
3052 We have to call unshare_expr because force_gimple_operand may
3053 modify the tree we pass to it. */
3054 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3055 false, NULL);
3057   /* If forcing produced any intermediate expressions, add them to the
3058      value sets and chain them into the instruction stream.  */
3059 if (forced_stmts)
3061 gsi = gsi_start (forced_stmts);
3062 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3064 gimple stmt = gsi_stmt (gsi);
3065 tree forcedname = gimple_get_lhs (stmt);
3066 pre_expr nameexpr;
3068 if (TREE_CODE (forcedname) == SSA_NAME)
3070 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3071 VN_INFO_GET (forcedname)->valnum = forcedname;
3072 VN_INFO (forcedname)->value_id = get_next_value_id ();
3073 nameexpr = get_or_alloc_expr_for_name (forcedname);
3074 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3075 if (!in_fre)
3076 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3077 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3080 gimple_seq_add_seq (stmts, forced_stmts);
3083 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
3084 newstmt = gimple_build_assign (name, folded);
3085 gimple_set_plf (newstmt, NECESSARY, false);
3087 gimple_seq_add_stmt (stmts, newstmt);
3088 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3090 /* Fold the last statement. */
3091 gsi = gsi_last (*stmts);
3092 if (fold_stmt_inplace (&gsi))
3093 update_stmt (gsi_stmt (gsi));
3095 /* Add a value number to the temporary.
3096      The value may already exist in either NEW_SETS or AVAIL_OUT, because
3097      we are creating the expression by pieces, and this particular piece of
3098      the expression may already have been represented.  There is no harm in replacing
3099 here. */
3100 VN_INFO_GET (name)->valnum = name;
3101 value_id = get_expr_value_id (expr);
3102 VN_INFO (name)->value_id = value_id;
3103 nameexpr = get_or_alloc_expr_for_name (name);
3104 add_to_value (value_id, nameexpr);
3105 if (NEW_SETS (block))
3106 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3107 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3109 pre_stats.insertions++;
3110 if (dump_file && (dump_flags & TDF_DETAILS))
3112 fprintf (dump_file, "Inserted ");
3113 print_gimple_stmt (dump_file, newstmt, 0, 0);
3114 fprintf (dump_file, " in predecessor %d\n", block->index);
3117 return name;
3121 /* Returns true if we want to inhibit the insertions of PHI nodes
3122 for the given EXPR for basic block BB (a member of a loop).
3123 We want to do this, when we fear that the induction variable we
3124 create might inhibit vectorization. */
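/* For instance (illustration only): for a load  a[i]  inside a loop
   where  i  is a simple induction variable, carrying the loaded value
   across iterations in a PHI effectively creates a second induction
   variable, and the vectorizer may then fail to recognize the simple
   access pattern; keeping the partially redundant load is the better
   trade-off in that case.  */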
3126 static bool
3127 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3129 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3130 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3131 vn_reference_op_t op;
3132 unsigned i;
3134 /* If we aren't going to vectorize we don't inhibit anything. */
3135 if (!flag_tree_vectorize)
3136 return false;
3138 /* Otherwise we inhibit the insertion when the address of the
3139 memory reference is a simple induction variable. In other
3140 cases the vectorizer won't do anything anyway (either it's
3141 loop invariant or a complicated expression). */
3142 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3144 switch (op->opcode)
3146 case CALL_EXPR:
3147 /* Calls are not a problem. */
3148 return false;
3150 case ARRAY_REF:
3151 case ARRAY_RANGE_REF:
3152 if (TREE_CODE (op->op0) != SSA_NAME)
3153 break;
3154 /* Fallthru. */
3155 case SSA_NAME:
3157 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3158 affine_iv iv;
3159 /* Default defs are loop invariant. */
3160 if (!defbb)
3161 break;
3162 /* Defined outside this loop, also loop invariant. */
3163 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3164 break;
3165 /* If it's a simple induction variable inhibit insertion,
3166 the vectorizer might be interested in this one. */
3167 if (simple_iv (bb->loop_father, bb->loop_father,
3168 op->op0, &iv, true))
3169 return true;
3170 /* No simple IV, vectorizer can't do anything, hence no
3171 reason to inhibit the transformation for this operand. */
3172 break;
3174 default:
3175 break;
3178 return false;
3181 /* Insert the to-be-made-available values of expression EXPRNUM for each
3182 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3183    merge the result with a phi node, giving it the same value number as
3184    the original expression.  Return true if we have inserted new stuff.  */
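/* AVAIL is indexed by the destination index of each predecessor edge,
   i.e. VEC_index (pre_expr, avail, pred->dest_idx) is the expression
   providing the value at the end of that predecessor; the entries
   double as the arguments of the PHI node built at the end.  */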
3186 static bool
3187 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3188 VEC(pre_expr, heap) *avail)
3190 pre_expr expr = expression_for_id (exprnum);
3191 pre_expr newphi;
3192 unsigned int val = get_expr_value_id (expr);
3193 edge pred;
3194 bool insertions = false;
3195 bool nophi = false;
3196 basic_block bprime;
3197 pre_expr eprime;
3198 edge_iterator ei;
3199 tree type = get_expr_type (expr);
3200 tree temp;
3201 gimple phi;
3203 /* Make sure we aren't creating an induction variable. */
3204 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3206 bool firstinsideloop = false;
3207 bool secondinsideloop = false;
3208 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3209 EDGE_PRED (block, 0)->src);
3210 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3211 EDGE_PRED (block, 1)->src);
3212 /* Induction variables only have one edge inside the loop. */
3213 if ((firstinsideloop ^ secondinsideloop)
3214 && (expr->kind != REFERENCE
3215 || inhibit_phi_insertion (block, expr)))
3217 if (dump_file && (dump_flags & TDF_DETAILS))
3218 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3219 nophi = true;
3223 /* Make the necessary insertions. */
3224 FOR_EACH_EDGE (pred, ei, block->preds)
3226 gimple_seq stmts = NULL;
3227 tree builtexpr;
3228 bprime = pred->src;
3229 eprime = VEC_index (pre_expr, avail, pred->dest_idx);
3231 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3233 builtexpr = create_expression_by_pieces (bprime,
3234 eprime,
3235 &stmts, NULL,
3236 type);
3237 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3238 gsi_insert_seq_on_edge (pred, stmts);
3239 VEC_replace (pre_expr, avail, pred->dest_idx,
3240 get_or_alloc_expr_for_name (builtexpr));
3241 insertions = true;
3243 else if (eprime->kind == CONSTANT)
3245 /* Constants may not have the right type, fold_convert
3246 should give us back a constant with the right type. */
3247 tree constant = PRE_EXPR_CONSTANT (eprime);
3248 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3250 tree builtexpr = fold_convert (type, constant);
3251 if (!is_gimple_min_invariant (builtexpr))
3253 tree forcedexpr = force_gimple_operand (builtexpr,
3254 &stmts, true,
3255 NULL);
3256 if (!is_gimple_min_invariant (forcedexpr))
3258 if (forcedexpr != builtexpr)
3260 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3261 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3263 if (stmts)
3265 gimple_stmt_iterator gsi;
3266 gsi = gsi_start (stmts);
3267 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3269 gimple stmt = gsi_stmt (gsi);
3270 tree lhs = gimple_get_lhs (stmt);
3271 if (TREE_CODE (lhs) == SSA_NAME)
3272 bitmap_set_bit (inserted_exprs,
3273 SSA_NAME_VERSION (lhs));
3274 gimple_set_plf (stmt, NECESSARY, false);
3276 gsi_insert_seq_on_edge (pred, stmts);
3278 VEC_replace (pre_expr, avail, pred->dest_idx,
3279 get_or_alloc_expr_for_name (forcedexpr));
3282 else
3283 VEC_replace (pre_expr, avail, pred->dest_idx,
3284 get_or_alloc_expr_for_constant (builtexpr));
3287 else if (eprime->kind == NAME)
3289 /* We may have to do a conversion because our value
3290 numbering can look through types in certain cases, but
3291 our IL requires all operands of a phi node have the same
3292 type. */
3293 tree name = PRE_EXPR_NAME (eprime);
3294 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3296 tree builtexpr;
3297 tree forcedexpr;
3298 builtexpr = fold_convert (type, name);
3299 forcedexpr = force_gimple_operand (builtexpr,
3300 &stmts, true,
3301 NULL);
3303 if (forcedexpr != name)
3305 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3306 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3309 if (stmts)
3311 gimple_stmt_iterator gsi;
3312 gsi = gsi_start (stmts);
3313 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3315 gimple stmt = gsi_stmt (gsi);
3316 tree lhs = gimple_get_lhs (stmt);
3317 if (TREE_CODE (lhs) == SSA_NAME)
3318 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3319 gimple_set_plf (stmt, NECESSARY, false);
3321 gsi_insert_seq_on_edge (pred, stmts);
3323 VEC_replace (pre_expr, avail, pred->dest_idx,
3324 get_or_alloc_expr_for_name (forcedexpr));
3328 /* If we didn't want a phi node, and we made insertions, we still have
3329 inserted new stuff, and thus return true. If we didn't want a phi node,
3330 and didn't make insertions, we haven't added anything new, so return
3331 false. */
3332 if (nophi && insertions)
3333 return true;
3334 else if (nophi && !insertions)
3335 return false;
3337 /* Now build a phi for the new variable. */
3338 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3339 phi = create_phi_node (temp, block);
3341 gimple_set_plf (phi, NECESSARY, false);
3342 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3343 VN_INFO (gimple_phi_result (phi))->value_id = val;
3344 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3345 FOR_EACH_EDGE (pred, ei, block->preds)
3347 pre_expr ae = VEC_index (pre_expr, avail, pred->dest_idx);
3348 gcc_assert (get_expr_type (ae) == type
3349 || useless_type_conversion_p (type, get_expr_type (ae)));
3350 if (ae->kind == CONSTANT)
3351 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3352 else
3353 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3356 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3357 add_to_value (val, newphi);
3359 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3360 this insertion, since we test for the existence of this value in PHI_GEN
3361 before proceeding with the partial redundancy checks in insert_aux.
3363 The value may exist in AVAIL_OUT, in particular, it could be represented
3364 by the expression we are trying to eliminate, in which case we want the
3365 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3366 inserted there.
3368      Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3369 this block, because if it did, it would have existed in our dominator's
3370 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3373 bitmap_insert_into_set (PHI_GEN (block), newphi);
3374 bitmap_value_replace_in_set (AVAIL_OUT (block),
3375 newphi);
3376 bitmap_insert_into_set (NEW_SETS (block),
3377 newphi);
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3381 fprintf (dump_file, "Created phi ");
3382 print_gimple_stmt (dump_file, phi, 0, 0);
3383 fprintf (dump_file, " in block %d\n", block->index);
3385 pre_stats.phis++;
3386 return true;
3391 /* Perform insertion of partially redundant values.
3392 For BLOCK, do the following:
3393 1. Propagate the NEW_SETS of the dominator into the current block.
3394 If the block has multiple predecessors,
3395 2a. Iterate over the ANTIC expressions for the block to see if
3396 any of them are partially redundant.
3397 2b. If so, insert them into the necessary predecessors to make
3398 the expression fully redundant.
3399 2c. Insert a new PHI merging the values of the predecessors.
3400 2d. Insert the new PHI, and the new expressions, into the
3401 NEW_SETS set.
3402 3. Recursively call ourselves on the dominator children of BLOCK.
3404 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3405    do_regular_insertion and do_partial_partial_insertion.
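/* The canonical shape of the transformation (illustration only):

     if (c)                          if (c)
       t1 = a + b;                     t1 = a + b;
     else                            else
       ;                               t2 = a + b;    <- inserted
     x = a + b;                      xp = PHI <t1, t2>
                                     x = xp;          <- fully redundant

   a + b was available from only one predecessor of the join, i.e.
   partially redundant; inserting it into the other predecessor and
   merging the values with a PHI makes the original computation fully
   redundant, so the elimination phase can replace it.  */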
3409 static bool
3410 do_regular_insertion (basic_block block, basic_block dom)
3412 bool new_stuff = false;
3413 VEC (pre_expr, heap) *exprs;
3414 pre_expr expr;
3415 VEC (pre_expr, heap) *avail = NULL;
3416 int i;
3418 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3419 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3421 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3423 if (expr->kind != NAME)
3425 unsigned int val;
3426 bool by_some = false;
3427 bool cant_insert = false;
3428 bool all_same = true;
3429 pre_expr first_s = NULL;
3430 edge pred;
3431 basic_block bprime;
3432 pre_expr eprime = NULL;
3433 edge_iterator ei;
3434 pre_expr edoubleprime = NULL;
3435 bool do_insertion = false;
3437 val = get_expr_value_id (expr);
3438 if (bitmap_set_contains_value (PHI_GEN (block), val))
3439 continue;
3440 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3443 fprintf (dump_file, "Found fully redundant value\n");
3444 continue;
3447 FOR_EACH_EDGE (pred, ei, block->preds)
3449 unsigned int vprime;
3451 /* We should never run insertion for the exit block
3452 and so not come across fake pred edges. */
3453 gcc_assert (!(pred->flags & EDGE_FAKE));
3454 bprime = pred->src;
3455 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3456 bprime, block);
3458 /* eprime will generally only be NULL if the
3459 value of the expression, translated
3460 through the PHI for this predecessor, is
3461 undefined. If that is the case, we can't
3462 make the expression fully redundant,
3463 because its value is undefined along a
3464 predecessor path. We can thus break out
3465 early because it doesn't matter what the
3466 rest of the results are. */
3467 if (eprime == NULL)
3469 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3470 cant_insert = true;
3471 break;
3474 eprime = fully_constant_expression (eprime);
3475 vprime = get_expr_value_id (eprime);
3476 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3477 vprime, NULL);
3478 if (edoubleprime == NULL)
3480 VEC_replace (pre_expr, avail, pred->dest_idx, eprime);
3481 all_same = false;
3483 else
3485 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3486 by_some = true;
3487 /* We want to perform insertions to remove a redundancy on
3488 a path in the CFG we want to optimize for speed. */
3489 if (optimize_edge_for_speed_p (pred))
3490 do_insertion = true;
3491 if (first_s == NULL)
3492 first_s = edoubleprime;
3493 else if (!pre_expr_d::equal (first_s, edoubleprime))
3494 all_same = false;
3497 /* If we can insert it, it's not the same value
3498 already existing along every predecessor, and
3499 it's defined by some predecessor, it is
3500 partially redundant. */
3501 if (!cant_insert && !all_same && by_some)
3503 if (!do_insertion)
3505 if (dump_file && (dump_flags & TDF_DETAILS))
3507 fprintf (dump_file, "Skipping partial redundancy for "
3508 "expression ");
3509 print_pre_expr (dump_file, expr);
3510 		      fprintf (dump_file, " (%04d), no redundancy on an edge to be "
3511 			       "optimized for speed\n", val);
3514 else if (dbg_cnt (treepre_insert))
3516 if (dump_file && (dump_flags & TDF_DETAILS))
3518 fprintf (dump_file, "Found partial redundancy for "
3519 "expression ");
3520 print_pre_expr (dump_file, expr);
3521 fprintf (dump_file, " (%04d)\n",
3522 get_expr_value_id (expr));
3524 if (insert_into_preds_of_block (block,
3525 get_expression_id (expr),
3526 avail))
3527 new_stuff = true;
3530 /* If all edges produce the same value and that value is
3531 an invariant, then the PHI has the same value on all
3532 edges. Note this. */
3533 else if (!cant_insert && all_same && eprime
3534 && (edoubleprime->kind == CONSTANT
3535 || edoubleprime->kind == NAME)
3536 && !value_id_constant_p (val))
3538 unsigned int j;
3539 bitmap_iterator bi;
3540 bitmap exprset = VEC_index (bitmap, value_expressions, val);
3542 unsigned int new_val = get_expr_value_id (edoubleprime);
3543 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bi)
3545 pre_expr expr = expression_for_id (j);
3547 if (expr->kind == NAME)
3549 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3550 /* Just reset the value id and valnum so it is
3551 the same as the constant we have discovered. */
3552 if (edoubleprime->kind == CONSTANT)
3554 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3555 pre_stats.constified++;
3557 else
3558 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3559 info->value_id = new_val;
3566 VEC_free (pre_expr, heap, exprs);
3567 VEC_free (pre_expr, heap, avail);
3568 return new_stuff;
3572 /* Perform insertion for partially anticipatable expressions. There
3573    is only one case in which we perform insertion for these: when the
3574    expression is partially anticipatable and fully available.
3575 In this case, we know that putting it earlier will enable us to
3576 remove the later computation. */
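/* For example (illustration only): suppose both predecessors of BLOCK
   compute  a + b, but below BLOCK it is recomputed only under a
   conditional.  The expression is then fully available at BLOCK yet
   only partially anticipated, so the regular ANTIC-based insertion
   does not catch it; merging the available values with a PHI in BLOCK
   lets the later conditional computation be removed.  */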
3579 static bool
3580 do_partial_partial_insertion (basic_block block, basic_block dom)
3582 bool new_stuff = false;
3583 VEC (pre_expr, heap) *exprs;
3584 pre_expr expr;
3585 VEC (pre_expr, heap) *avail = NULL;
3586 int i;
3588 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3589 VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
3591 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3593 if (expr->kind != NAME)
3595 unsigned int val;
3596 bool by_all = true;
3597 bool cant_insert = false;
3598 edge pred;
3599 basic_block bprime;
3600 pre_expr eprime = NULL;
3601 edge_iterator ei;
3603 val = get_expr_value_id (expr);
3604 if (bitmap_set_contains_value (PHI_GEN (block), val))
3605 continue;
3606 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3607 continue;
3609 FOR_EACH_EDGE (pred, ei, block->preds)
3611 unsigned int vprime;
3612 pre_expr edoubleprime;
3614 /* We should never run insertion for the exit block
3615 and so not come across fake pred edges. */
3616 gcc_assert (!(pred->flags & EDGE_FAKE));
3617 bprime = pred->src;
3618 eprime = phi_translate (expr, ANTIC_IN (block),
3619 PA_IN (block),
3620 bprime, block);
3622 /* eprime will generally only be NULL if the
3623 value of the expression, translated
3624 through the PHI for this predecessor, is
3625 undefined. If that is the case, we can't
3626 make the expression fully redundant,
3627 because its value is undefined along a
3628 predecessor path. We can thus break out
3629 early because it doesn't matter what the
3630 rest of the results are. */
3631 if (eprime == NULL)
3633 VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
3634 cant_insert = true;
3635 break;
3638 eprime = fully_constant_expression (eprime);
3639 vprime = get_expr_value_id (eprime);
3640 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3641 vprime, NULL);
3642 VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
3643 if (edoubleprime == NULL)
3645 by_all = false;
3646 break;
3650 /* If we can insert it, it's not the same value
3651 already existing along every predecessor, and
3652 it's defined by some predecessor, it is
3653 partially redundant. */
3654 if (!cant_insert && by_all)
3656 edge succ;
3657 bool do_insertion = false;
3659 /* Insert only if we can remove a later expression on a path
3660 that we want to optimize for speed.
3661 The phi node that we will be inserting in BLOCK is not free,
3662 and inserting it for the sake of !optimize_for_speed successor
3663 may cause regressions on the speed path. */
3664 FOR_EACH_EDGE (succ, ei, block->succs)
3666 if (bitmap_set_contains_value (PA_IN (succ->dest), val))
3668 if (optimize_edge_for_speed_p (succ))
3669 do_insertion = true;
3673 if (!do_insertion)
3675 if (dump_file && (dump_flags & TDF_DETAILS))
3677 fprintf (dump_file, "Skipping partial partial redundancy "
3678 "for expression ");
3679 print_pre_expr (dump_file, expr);
3680 		  fprintf (dump_file, " (%04d), not partially anticipated "
3681 			   "on any edges to be optimized for speed\n", val);
3684 else if (dbg_cnt (treepre_insert))
3686 pre_stats.pa_insert++;
3687 if (dump_file && (dump_flags & TDF_DETAILS))
3689 fprintf (dump_file, "Found partial partial redundancy "
3690 "for expression ");
3691 print_pre_expr (dump_file, expr);
3692 fprintf (dump_file, " (%04d)\n",
3693 get_expr_value_id (expr));
3695 if (insert_into_preds_of_block (block,
3696 get_expression_id (expr),
3697 avail))
3698 new_stuff = true;
3704 VEC_free (pre_expr, heap, exprs);
3705 VEC_free (pre_expr, heap, avail);
3706 return new_stuff;
3709 static bool
3710 insert_aux (basic_block block)
3712 basic_block son;
3713 bool new_stuff = false;
3715 if (block)
3717 basic_block dom;
3718 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3719 if (dom)
3721 unsigned i;
3722 bitmap_iterator bi;
3723 bitmap_set_t newset = NEW_SETS (dom);
3724 if (newset)
3726 	  /* Note that we need to value_replace both NEW_SETS and
3727 	     AVAIL_OUT.  In the case of NEW_SETS, the value may be
3728 	     represented by some non-simple expression here that we want
3729 	     to replace it with.  */
3730 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3732 pre_expr expr = expression_for_id (i);
3733 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3734 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3737 if (!single_pred_p (block))
3739 new_stuff |= do_regular_insertion (block, dom);
3740 if (do_partial_partial)
3741 new_stuff |= do_partial_partial_insertion (block, dom);
3745 for (son = first_dom_son (CDI_DOMINATORS, block);
3746 son;
3747 son = next_dom_son (CDI_DOMINATORS, son))
3749 new_stuff |= insert_aux (son);
3752 return new_stuff;
3755 /* Perform insertion of partially redundant values. */
3757 static void
3758 insert (void)
3760 bool new_stuff = true;
3761 basic_block bb;
3762 int num_iterations = 0;
3764 FOR_ALL_BB (bb)
3765 NEW_SETS (bb) = bitmap_set_new ();
3767 while (new_stuff)
3769 num_iterations++;
3770       if (dump_file && (dump_flags & TDF_DETAILS))
3771 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3772 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3774 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3778 /* Add OP to EXP_GEN (block).  */
3780 static void
3781 add_to_exp_gen (basic_block block, tree op)
3783 pre_expr result;
3785 gcc_checking_assert (!in_fre);
3787 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3788 return;
3790 result = get_or_alloc_expr_for_name (op);
3791 bitmap_value_insert_into_set (EXP_GEN (block), result);
3794 /* Create value ids for PHI in BLOCK. */
3796 static void
3797 make_values_for_phi (gimple phi, basic_block block)
3799 tree result = gimple_phi_result (phi);
3801 /* We have no need for virtual phis, as they don't represent
3802 actual computations. */
3803 if (virtual_operand_p (result))
3804 return;
3806 pre_expr e = get_or_alloc_expr_for_name (result);
3807 add_to_value (get_expr_value_id (e), e);
3808 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3809 if (!in_fre)
3811 unsigned i;
3812 bitmap_insert_into_set (PHI_GEN (block), e);
3813 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3815 tree arg = gimple_phi_arg_def (phi, i);
3816 if (TREE_CODE (arg) == SSA_NAME)
3818 e = get_or_alloc_expr_for_name (arg);
3819 add_to_value (get_expr_value_id (e), e);
3825 /* Compute the AVAIL set for all basic blocks.
3827 This function performs value numbering of the statements in each basic
3828 block. The AVAIL sets are built from information we glean while doing
3829 this value numbering, since the AVAIL sets contain only one entry per
3830 value.
3832 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3833 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
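/* Unrolling the equations for a dominator chain B1 idom B2 idom B3
   (illustration only):

     AVAIL_OUT[B3] = AVAIL_OUT[B2] U PHI_GEN[B3] U TMP_GEN[B3]
                   = AVAIL_OUT[B1] U PHI_GEN[B2] U TMP_GEN[B2]
                                   U PHI_GEN[B3] U TMP_GEN[B3]

   which is what the dominator-tree walk below computes by seeding each
   block's set from its immediate dominator's AVAIL_OUT and then adding
   the values its own PHIs and statements define.  */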
3835 static void
3836 compute_avail (void)
3839 basic_block block, son;
3840 basic_block *worklist;
3841 size_t sp = 0;
3842 unsigned i;
3844 /* We pretend that default definitions are defined in the entry block.
3845 This includes function arguments and the static chain decl. */
3846 for (i = 1; i < num_ssa_names; ++i)
3848 tree name = ssa_name (i);
3849 pre_expr e;
3850 if (!name
3851 || !SSA_NAME_IS_DEFAULT_DEF (name)
3852 || has_zero_uses (name)
3853 || virtual_operand_p (name))
3854 continue;
3856 e = get_or_alloc_expr_for_name (name);
3857 add_to_value (get_expr_value_id (e), e);
3858 if (!in_fre)
3859 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3860 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3863 /* Allocate the worklist. */
3864 worklist = XNEWVEC (basic_block, n_basic_blocks);
3866 /* Seed the algorithm by putting the dominator children of the entry
3867 block on the worklist. */
3868 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3869 son;
3870 son = next_dom_son (CDI_DOMINATORS, son))
3871 worklist[sp++] = son;
3873 /* Loop until the worklist is empty. */
3874 while (sp)
3876 gimple_stmt_iterator gsi;
3877 gimple stmt;
3878 basic_block dom;
3879 unsigned int stmt_uid = 1;
3881 /* Pick a block from the worklist. */
3882 block = worklist[--sp];
3884 /* Initially, the set of available values in BLOCK is that of
3885 its immediate dominator. */
3886 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3887 if (dom)
3888 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3890 /* Generate values for PHI nodes. */
3891 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3892 make_values_for_phi (gsi_stmt (gsi), block);
3894 BB_MAY_NOTRETURN (block) = 0;
3896 /* Now compute value numbers and populate value sets with all
3897 the expressions computed in BLOCK. */
3898 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3900 ssa_op_iter iter;
3901 tree op;
3903 stmt = gsi_stmt (gsi);
3904 gimple_set_uid (stmt, stmt_uid++);
3906 /* Cache whether the basic-block has any non-visible side-effect
3907 or control flow.
3908 If this isn't a call or it is the last stmt in the
3909 basic-block then the CFG represents things correctly. */
3910 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3912 /* Non-looping const functions always return normally.
3913 Otherwise the call might not return or have side-effects
3914 	     that forbid hoisting possibly trapping expressions
3915 before it. */
3916 int flags = gimple_call_flags (stmt);
3917 if (!(flags & ECF_CONST)
3918 || (flags & ECF_LOOPING_CONST_OR_PURE))
3919 BB_MAY_NOTRETURN (block) = 1;
3922 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3924 pre_expr e = get_or_alloc_expr_for_name (op);
3926 add_to_value (get_expr_value_id (e), e);
3927 if (!in_fre)
3928 bitmap_insert_into_set (TMP_GEN (block), e);
3929 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3932 /* That's all we need to do when doing FRE. */
3933 if (in_fre)
3934 continue;
3936 if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
3937 continue;
3939 switch (gimple_code (stmt))
3941 case GIMPLE_RETURN:
3942 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3943 add_to_exp_gen (block, op);
3944 continue;
3946 case GIMPLE_CALL:
3948 vn_reference_t ref;
3949 unsigned int i;
3950 vn_reference_op_t vro;
3951 pre_expr result = NULL;
3952 VEC(vn_reference_op_s, heap) *ops = NULL;
3954 /* We can value number only calls to real functions. */
3955 if (gimple_call_internal_p (stmt))
3956 continue;
3958 copy_reference_ops_from_call (stmt, &ops);
3959 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3960 gimple_expr_type (stmt),
3961 ops, &ref, VN_NOWALK);
3962 VEC_free (vn_reference_op_s, heap, ops);
3963 if (!ref)
3964 continue;
3966 for (i = 0; VEC_iterate (vn_reference_op_s,
3967 ref->operands, i,
3968 vro); i++)
3970 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3971 add_to_exp_gen (block, vro->op0);
3972 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3973 add_to_exp_gen (block, vro->op1);
3974 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3975 add_to_exp_gen (block, vro->op2);
3978 /* If the value of the call is not invalidated in
3979 this block until it is computed, add the expression
3980 to EXP_GEN. */
3981 if (!gimple_vuse (stmt)
3982 || gimple_code
3983 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3984 || gimple_bb (SSA_NAME_DEF_STMT
3985 (gimple_vuse (stmt))) != block)
3987 result = (pre_expr) pool_alloc (pre_expr_pool);
3988 result->kind = REFERENCE;
3989 result->id = 0;
3990 PRE_EXPR_REFERENCE (result) = ref;
3992 get_or_alloc_expression_id (result);
3993 add_to_value (get_expr_value_id (result), result);
3994 bitmap_value_insert_into_set (EXP_GEN (block), result);
3996 continue;
3999 case GIMPLE_ASSIGN:
4001 pre_expr result = NULL;
4002 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4004 case tcc_unary:
4005 case tcc_binary:
4006 case tcc_comparison:
4008 vn_nary_op_t nary;
4009 unsigned int i;
4011 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4012 gimple_assign_rhs_code (stmt),
4013 gimple_expr_type (stmt),
4014 gimple_assign_rhs1_ptr (stmt),
4015 &nary);
4017 if (!nary)
4018 continue;
4020 for (i = 0; i < nary->length; i++)
4021 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4022 add_to_exp_gen (block, nary->op[i]);
4024 /* If the NARY traps and there was a preceding
4025 point in the block that might not return, avoid
4026 adding the nary to EXP_GEN. */
4027 if (BB_MAY_NOTRETURN (block)
4028 && vn_nary_may_trap (nary))
4029 continue;
4031 result = (pre_expr) pool_alloc (pre_expr_pool);
4032 result->kind = NARY;
4033 result->id = 0;
4034 PRE_EXPR_NARY (result) = nary;
4035 break;
4038 case tcc_declaration:
4039 case tcc_reference:
4041 vn_reference_t ref;
4042 unsigned int i;
4043 vn_reference_op_t vro;
4045 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4046 gimple_vuse (stmt),
4047 VN_WALK, &ref);
4048 if (!ref)
4049 continue;
4051 for (i = 0; VEC_iterate (vn_reference_op_s,
4052 ref->operands, i,
4053 vro); i++)
4055 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4056 add_to_exp_gen (block, vro->op0);
4057 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4058 add_to_exp_gen (block, vro->op1);
4059 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4060 add_to_exp_gen (block, vro->op2);
4063 /* If the value of the reference is not invalidated in
4064 this block until it is computed, add the expression
4065 to EXP_GEN. */
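		  /* E.g. (made-up SSA names): for the load in

			 *p_1 = 0;
			 x_2 = *q_3;

		     the walk below reaches the store via the virtual
		     use chain; if *p_1 = 0 may clobber *q_3, the
		     load's value does not hold from block entry
		     onwards and must not be put into EXP_GEN.  */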
4066 if (gimple_vuse (stmt))
4068 gimple def_stmt;
4069 bool ok = true;
4070 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
4071 while (!gimple_nop_p (def_stmt)
4072 && gimple_code (def_stmt) != GIMPLE_PHI
4073 && gimple_bb (def_stmt) == block)
4075 if (stmt_may_clobber_ref_p
4076 (def_stmt, gimple_assign_rhs1 (stmt)))
4078 ok = false;
4079 break;
4081 def_stmt
4082 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
4084 if (!ok)
4085 continue;
4088 result = (pre_expr) pool_alloc (pre_expr_pool);
4089 result->kind = REFERENCE;
4090 result->id = 0;
4091 PRE_EXPR_REFERENCE (result) = ref;
4092 break;
4095 default:
4096 /* For any other statement that we don't
4097 recognize, simply add all referenced
4098 SSA_NAMEs to EXP_GEN. */
4099 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4100 add_to_exp_gen (block, op);
4101 continue;
4104 get_or_alloc_expression_id (result);
4105 add_to_value (get_expr_value_id (result), result);
4106 bitmap_value_insert_into_set (EXP_GEN (block), result);
4108 continue;
4110 default:
4111 break;
4115 /* Put the dominator children of BLOCK on the worklist of blocks
4116 to compute available sets for. */
4117 for (son = first_dom_son (CDI_DOMINATORS, block);
4118 son;
4119 son = next_dom_son (CDI_DOMINATORS, son))
4120 worklist[sp++] = son;
4123 free (worklist);
4126 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4127 than the available expressions for it. The insertion point is
4128 right before the first use in STMT. Returns the SSA_NAME that should
4129 be used for replacement. */
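/* For example (hypothetical names): if SCCVN decided that x_1's value
   is best expressed as a_2 + 1, but no SSA name computing a_2 + 1 is
   available at STMT, this inserts t_3 = a_2 + 1 immediately before
   STMT and returns t_3 as the replacement.  */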
4131 static tree
4132 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4134 basic_block bb = gimple_bb (stmt);
4135 gimple_stmt_iterator gsi;
4136 gimple_seq stmts = NULL;
4137 tree expr;
4138 pre_expr e;
4140 /* First create a value expression from the expression we want
4141 to insert and associate it with the value handle for SSA_VN. */
4142 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4143 if (e == NULL)
4144 return NULL_TREE;
4146 /* Then use create_expression_by_pieces to generate a valid
4147 expression to insert at this point of the IL stream. */
4148 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4149 if (expr == NULL_TREE)
4150 return NULL_TREE;
4151 gsi = gsi_for_stmt (stmt);
4152 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4154 return expr;
4157 /* Eliminate fully redundant computations. */
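/* A made-up example of a full redundancy (hypothetical SSA names):

	x_1 = a_2 + b_3;
	...
	y_4 = a_2 + b_3;

   Value numbering gives x_1 and y_4 the same value, so the leader x_1
   found in AVAIL_OUT replaces the RHS of the second statement, turning
   it into the copy y_4 = x_1.  */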
4159 static unsigned int
4160 eliminate (void)
4162 VEC (gimple, heap) *to_remove = NULL;
4163 VEC (gimple, heap) *to_update = NULL;
4164 basic_block b;
4165 unsigned int todo = 0;
4166 gimple_stmt_iterator gsi;
4167 gimple stmt;
4168 unsigned i;
4170 FOR_EACH_BB (b)
4172 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4174 tree lhs = NULL_TREE;
4175 tree rhs = NULL_TREE;
4177 stmt = gsi_stmt (gsi);
4179 if (gimple_has_lhs (stmt))
4180 lhs = gimple_get_lhs (stmt);
4182 if (gimple_assign_single_p (stmt))
4183 rhs = gimple_assign_rhs1 (stmt);
4185 /* Lookup the RHS of the expression, see if we have an
4186 available computation for it. If so, replace the RHS with
4187 the available computation.
4189 See PR43491.
4190 We don't replace a global register variable when it is the RHS of
4191 a single assign. We do replace local register variables, since GCC
4192 does not guarantee that a local variable will be allocated in a register. */
4193 if (gimple_has_lhs (stmt)
4194 && TREE_CODE (lhs) == SSA_NAME
4195 && !gimple_assign_ssa_name_copy_p (stmt)
4196 && (!gimple_assign_single_p (stmt)
4197 || (!is_gimple_min_invariant (rhs)
4198 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4199 || !is_global_var (rhs)
4200 || !DECL_HARD_REGISTER (rhs))))
4201 && !gimple_has_volatile_ops (stmt)
4202 && !has_zero_uses (lhs))
4204 tree sprime = NULL;
4205 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4206 pre_expr sprimeexpr;
4207 gimple orig_stmt = stmt;
4209 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4210 get_expr_value_id (lhsexpr),
4211 NULL);
4213 if (sprimeexpr)
4215 if (sprimeexpr->kind == CONSTANT)
4216 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4217 else if (sprimeexpr->kind == NAME)
4218 sprime = PRE_EXPR_NAME (sprimeexpr);
4219 else
4220 gcc_unreachable ();
4223 /* If there is no existing leader but SCCVN knows this
4224 value is constant, use that constant. */
4225 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4227 sprime = VN_INFO (lhs)->valnum;
4228 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4229 TREE_TYPE (sprime)))
4230 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4232 if (dump_file && (dump_flags & TDF_DETAILS))
4234 fprintf (dump_file, "Replaced ");
4235 print_gimple_expr (dump_file, stmt, 0, 0);
4236 fprintf (dump_file, " with ");
4237 print_generic_expr (dump_file, sprime, 0);
4238 fprintf (dump_file, " in ");
4239 print_gimple_stmt (dump_file, stmt, 0, 0);
4241 pre_stats.eliminations++;
4242 propagate_tree_value_into_stmt (&gsi, sprime);
4243 stmt = gsi_stmt (gsi);
4244 update_stmt (stmt);
4246 /* If we removed EH side-effects from the statement, clean
4247 its EH information. */
4248 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4250 bitmap_set_bit (need_eh_cleanup,
4251 gimple_bb (stmt)->index);
4252 if (dump_file && (dump_flags & TDF_DETAILS))
4253 fprintf (dump_file, " Removed EH side-effects.\n");
4255 continue;
4258 /* If there is no existing usable leader but SCCVN thinks
4259 it has an expression it wants to use as replacement,
4260 insert that. */
4261 if (!sprime || sprime == lhs)
4263 tree val = VN_INFO (lhs)->valnum;
4264 if (val != VN_TOP
4265 && TREE_CODE (val) == SSA_NAME
4266 && VN_INFO (val)->needs_insertion
4267 && can_PRE_operation (vn_get_expr_for (val)))
4268 sprime = do_SCCVN_insertion (stmt, val);
4270 if (sprime
4271 && sprime != lhs
4272 && (rhs == NULL_TREE
4273 || TREE_CODE (rhs) != SSA_NAME
4274 || may_propagate_copy (rhs, sprime)))
4276 bool can_make_abnormal_goto
4277 = is_gimple_call (stmt)
4278 && stmt_can_make_abnormal_goto (stmt);
4280 gcc_assert (sprime != rhs);
4282 if (dump_file && (dump_flags & TDF_DETAILS))
4284 fprintf (dump_file, "Replaced ");
4285 print_gimple_expr (dump_file, stmt, 0, 0);
4286 fprintf (dump_file, " with ");
4287 print_generic_expr (dump_file, sprime, 0);
4288 fprintf (dump_file, " in ");
4289 print_gimple_stmt (dump_file, stmt, 0, 0);
4292 if (TREE_CODE (sprime) == SSA_NAME)
4293 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4294 NECESSARY, true);
4295 /* We need to make sure the new and old types actually match,
4296 which may require adding a simple cast, which fold_convert
4297 will do for us. */
4298 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4299 && !useless_type_conversion_p (gimple_expr_type (stmt),
4300 TREE_TYPE (sprime)))
4301 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4303 pre_stats.eliminations++;
4304 propagate_tree_value_into_stmt (&gsi, sprime);
4305 stmt = gsi_stmt (gsi);
4306 update_stmt (stmt);
4308 /* If we removed EH side-effects from the statement, clean
4309 its EH information. */
4310 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4312 bitmap_set_bit (need_eh_cleanup,
4313 gimple_bb (stmt)->index);
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4315 fprintf (dump_file, " Removed EH side-effects.\n");
4318 /* Likewise for AB side-effects. */
4319 if (can_make_abnormal_goto
4320 && !stmt_can_make_abnormal_goto (stmt))
4322 bitmap_set_bit (need_ab_cleanup,
4323 gimple_bb (stmt)->index);
4324 if (dump_file && (dump_flags & TDF_DETAILS))
4325 fprintf (dump_file, " Removed AB side-effects.\n");
4329 /* If the statement is a scalar store, see if the expression
4330 has the same value number as its rhs. If so, the store is
4331 dead. */
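	  /* E.g. (hypothetical):

		 x_1 = *p_2;
		 *p_2 = x_1;

	     The value being stored is the value the location already
	     holds, so the second statement changes nothing and is
	     queued for removal.  */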
4332 else if (gimple_assign_single_p (stmt)
4333 && !gimple_has_volatile_ops (stmt)
4334 && !is_gimple_reg (gimple_assign_lhs (stmt))
4335 && (TREE_CODE (rhs) == SSA_NAME
4336 || is_gimple_min_invariant (rhs)))
4338 tree val;
4339 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4340 gimple_vuse (stmt), VN_WALK, NULL);
4341 if (TREE_CODE (rhs) == SSA_NAME)
4342 rhs = VN_INFO (rhs)->valnum;
4343 if (val
4344 && operand_equal_p (val, rhs, 0))
4346 if (dump_file && (dump_flags & TDF_DETAILS))
4348 fprintf (dump_file, "Deleted redundant store ");
4349 print_gimple_stmt (dump_file, stmt, 0, 0);
4352 /* Queue stmt for removal. */
4353 VEC_safe_push (gimple, heap, to_remove, stmt);
4356 /* Visit COND_EXPRs and fold the comparison with the
4357 available value-numbers. */
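	  /* E.g. (hypothetical): if VN proved that x_1 always has the
	     constant value 0, then

		 if (x_1 != 0)

	     folds to false; the condition is made unconditionally
	     false and CFG cleanup later removes the dead arm.  */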
4358 else if (gimple_code (stmt) == GIMPLE_COND)
4360 tree op0 = gimple_cond_lhs (stmt);
4361 tree op1 = gimple_cond_rhs (stmt);
4362 tree result;
4364 if (TREE_CODE (op0) == SSA_NAME)
4365 op0 = VN_INFO (op0)->valnum;
4366 if (TREE_CODE (op1) == SSA_NAME)
4367 op1 = VN_INFO (op1)->valnum;
4368 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4369 op0, op1);
4370 if (result && TREE_CODE (result) == INTEGER_CST)
4372 if (integer_zerop (result))
4373 gimple_cond_make_false (stmt);
4374 else
4375 gimple_cond_make_true (stmt);
4376 update_stmt (stmt);
4377 todo = TODO_cleanup_cfg;
4380 /* Visit indirect calls and turn them into direct calls if
4381 possible. */
4382 if (is_gimple_call (stmt))
4384 tree orig_fn = gimple_call_fn (stmt);
4385 tree fn;
4386 if (!orig_fn)
4387 continue;
4388 if (TREE_CODE (orig_fn) == SSA_NAME)
4389 fn = VN_INFO (orig_fn)->valnum;
4390 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4391 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4392 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4393 else
4394 continue;
4395 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4396 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4397 TREE_TYPE (fn)))
4399 bool can_make_abnormal_goto
4400 = stmt_can_make_abnormal_goto (stmt);
4401 bool was_noreturn = gimple_call_noreturn_p (stmt);
4403 if (dump_file && (dump_flags & TDF_DETAILS))
4405 fprintf (dump_file, "Replacing call target with ");
4406 print_generic_expr (dump_file, fn, 0);
4407 fprintf (dump_file, " in ");
4408 print_gimple_stmt (dump_file, stmt, 0, 0);
4411 gimple_call_set_fn (stmt, fn);
4412 VEC_safe_push (gimple, heap, to_update, stmt);
4414 /* When changing a call into a noreturn call, cfg cleanup
4415 is needed to fix up the noreturn call. */
4416 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4417 todo |= TODO_cleanup_cfg;
4419 /* If we removed EH side-effects from the statement, clean
4420 its EH information. */
4421 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4423 bitmap_set_bit (need_eh_cleanup,
4424 gimple_bb (stmt)->index);
4425 if (dump_file && (dump_flags & TDF_DETAILS))
4426 fprintf (dump_file, " Removed EH side-effects.\n");
4429 /* Likewise for AB side-effects. */
4430 if (can_make_abnormal_goto
4431 && !stmt_can_make_abnormal_goto (stmt))
4433 bitmap_set_bit (need_ab_cleanup,
4434 gimple_bb (stmt)->index);
4435 if (dump_file && (dump_flags & TDF_DETAILS))
4436 fprintf (dump_file, " Removed AB side-effects.\n");
4439 /* Changing an indirect call to a direct call may
4440 have exposed different semantics. This may
4441 require an SSA update. */
4442 todo |= TODO_update_ssa_only_virtuals;
4447 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4449 gimple stmt, phi = gsi_stmt (gsi);
4450 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4451 pre_expr sprimeexpr, resexpr;
4452 gimple_stmt_iterator gsi2;
4454 /* We want to perform redundant PHI elimination. Do so by
4455 replacing the PHI with a single copy if possible.
4456 Do not touch inserted, single-argument or virtual PHIs. */
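	  /* A hypothetical case handled here: if both arguments of

		 res_1 = PHI <x_2(3), x_2(4)>

	     carry the same value, bitmap_find_leader finds x_2 as the
	     leader for res_1's value, and the PHI is replaced by the
	     copy res_1 = x_2, itself queued for later removal.  */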
4457 if (gimple_phi_num_args (phi) == 1
4458 || virtual_operand_p (res))
4460 gsi_next (&gsi);
4461 continue;
4464 resexpr = get_or_alloc_expr_for_name (res);
4465 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4466 get_expr_value_id (resexpr), NULL);
4467 if (sprimeexpr)
4469 if (sprimeexpr->kind == CONSTANT)
4470 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4471 else if (sprimeexpr->kind == NAME)
4472 sprime = PRE_EXPR_NAME (sprimeexpr);
4473 else
4474 gcc_unreachable ();
4476 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4478 sprime = VN_INFO (res)->valnum;
4479 if (!useless_type_conversion_p (TREE_TYPE (res),
4480 TREE_TYPE (sprime)))
4481 sprime = fold_convert (TREE_TYPE (res), sprime);
4483 if (!sprime
4484 || sprime == res)
4486 gsi_next (&gsi);
4487 continue;
4490 if (dump_file && (dump_flags & TDF_DETAILS))
4492 fprintf (dump_file, "Replaced redundant PHI node defining ");
4493 print_generic_expr (dump_file, res, 0);
4494 fprintf (dump_file, " with ");
4495 print_generic_expr (dump_file, sprime, 0);
4496 fprintf (dump_file, "\n");
4499 remove_phi_node (&gsi, false);
4501 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4502 && TREE_CODE (sprime) == SSA_NAME)
4503 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4505 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4506 sprime = fold_convert (TREE_TYPE (res), sprime);
4507 stmt = gimple_build_assign (res, sprime);
4508 SSA_NAME_DEF_STMT (res) = stmt;
4509 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4511 gsi2 = gsi_after_labels (b);
4512 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4513 /* Queue the copy for eventual removal. */
4514 VEC_safe_push (gimple, heap, to_remove, stmt);
4515 /* If we inserted this PHI node ourselves, it's not an elimination. */
4516 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4517 pre_stats.phis--;
4518 else
4519 pre_stats.eliminations++;
4523 /* We cannot remove stmts during the BB walk, especially not release SSA
4524 names there, as this confuses the VN machinery. The stmts ending
4525 up in to_remove are either stores or simple copies. */
4526 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4528 tree lhs = gimple_assign_lhs (stmt);
4529 tree rhs = gimple_assign_rhs1 (stmt);
4530 use_operand_p use_p;
4531 gimple use_stmt;
4533 /* If there is a single use only, propagate the equivalency
4534 instead of keeping the copy. */
4535 if (TREE_CODE (lhs) == SSA_NAME
4536 && TREE_CODE (rhs) == SSA_NAME
4537 && single_imm_use (lhs, &use_p, &use_stmt)
4538 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4540 SET_USE (use_p, rhs);
4541 update_stmt (use_stmt);
4542 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4543 && TREE_CODE (rhs) == SSA_NAME)
4544 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4547 /* If this is a store or a now unused copy, remove it. */
4548 if (TREE_CODE (lhs) != SSA_NAME
4549 || has_zero_uses (lhs))
4551 basic_block bb = gimple_bb (stmt);
4552 gsi = gsi_for_stmt (stmt);
4553 unlink_stmt_vdef (stmt);
4554 if (gsi_remove (&gsi, true))
4555 bitmap_set_bit (need_eh_cleanup, bb->index);
4556 if (TREE_CODE (lhs) == SSA_NAME)
4557 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4558 release_defs (stmt);
4561 VEC_free (gimple, heap, to_remove);
4563 /* We cannot update call statements with virtual operands during
4564 the SSA walk. This might remove them, which in turn makes our
4565 VN lattice invalid. */
4566 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4567 update_stmt (stmt);
4568 VEC_free (gimple, heap, to_update);
4570 return todo;
4573 /* Borrow a bit of tree-ssa-dce.c for the moment.
4574 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4575 this may be a bit faster, and we may want critical edges kept split. */
4577 /* If OP's defining statement has not already been determined to be necessary,
4578 mark that statement necessary. Return the stmt, if it is newly
4579 necessary. */
4581 static inline gimple
4582 mark_operand_necessary (tree op)
4584 gimple stmt;
4586 gcc_assert (op);
4588 if (TREE_CODE (op) != SSA_NAME)
4589 return NULL;
4591 stmt = SSA_NAME_DEF_STMT (op);
4592 gcc_assert (stmt);
4594 if (gimple_plf (stmt, NECESSARY)
4595 || gimple_nop_p (stmt))
4596 return NULL;
4598 gimple_set_plf (stmt, NECESSARY, true);
4599 return stmt;
4602 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4603 to insert PHI nodes sometimes, and because value numbering of casts isn't
4604 perfect, we sometimes end up inserting dead code. This simple DCE-like
4605 pass removes any insertions we made that weren't actually used. */
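/* For instance (made-up name): if insert () created

	pretmp_1 = a_2 + b_3;

   but eliminate () never used pretmp_1 in any replacement, nothing
   marks its definition NECESSARY and the statement is deleted here.  */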
4607 static void
4608 remove_dead_inserted_code (void)
4610 bitmap worklist;
4611 unsigned i;
4612 bitmap_iterator bi;
4613 gimple t;
4615 worklist = BITMAP_ALLOC (NULL);
4616 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4618 t = SSA_NAME_DEF_STMT (ssa_name (i));
4619 if (gimple_plf (t, NECESSARY))
4620 bitmap_set_bit (worklist, i);
4622 while (!bitmap_empty_p (worklist))
4624 i = bitmap_first_set_bit (worklist);
4625 bitmap_clear_bit (worklist, i);
4626 t = SSA_NAME_DEF_STMT (ssa_name (i));
4628 /* PHI nodes are somewhat special in that each PHI alternative has
4629 data and control dependencies. All the statements feeding the
4630 PHI node's arguments are always necessary. */
4631 if (gimple_code (t) == GIMPLE_PHI)
4633 unsigned k;
4635 for (k = 0; k < gimple_phi_num_args (t); k++)
4637 tree arg = PHI_ARG_DEF (t, k);
4638 if (TREE_CODE (arg) == SSA_NAME)
4640 gimple n = mark_operand_necessary (arg);
4641 if (n)
4642 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4646 else
4648 /* Propagate through the operands. Examine all the USE, VUSE and
4649 VDEF operands in this statement. Mark all the statements
4650 which feed this statement's uses as necessary. */
4651 ssa_op_iter iter;
4652 tree use;
4654 /* The operands of VDEF expressions are also needed as they
4655 represent potential definitions that may reach this
4656 statement (VDEF operands allow us to follow def-def
4657 links). */
4659 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4661 gimple n = mark_operand_necessary (use);
4662 if (n)
4663 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4668 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4670 t = SSA_NAME_DEF_STMT (ssa_name (i));
4671 if (!gimple_plf (t, NECESSARY))
4673 gimple_stmt_iterator gsi;
4675 if (dump_file && (dump_flags & TDF_DETAILS))
4677 fprintf (dump_file, "Removing unnecessary insertion:");
4678 print_gimple_stmt (dump_file, t, 0, 0);
4681 gsi = gsi_for_stmt (t);
4682 if (gimple_code (t) == GIMPLE_PHI)
4683 remove_phi_node (&gsi, true);
4684 else
4686 gsi_remove (&gsi, true);
4687 release_defs (t);
4691 BITMAP_FREE (worklist);
4694 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4695 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4696 the number of visited blocks. */
4698 static int
4699 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4701 edge_iterator *stack;
4702 int sp;
4703 int post_order_num = 0;
4704 sbitmap visited;
4706 if (include_entry_exit)
4707 post_order[post_order_num++] = EXIT_BLOCK;
4709 /* Allocate stack for back-tracking up CFG. */
4710 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4711 sp = 0;
4713 /* Allocate bitmap to track nodes that have been visited. */
4714 visited = sbitmap_alloc (last_basic_block);
4716 /* None of the nodes in the CFG have been visited yet. */
4717 sbitmap_zero (visited);
4719 /* Push an iterator over EXIT_BLOCK's incoming edges onto the stack. */
4720 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4722 while (sp)
4724 edge_iterator ei;
4725 basic_block src;
4726 basic_block dest;
4728 /* Look at the edge on the top of the stack. */
4729 ei = stack[sp - 1];
4730 src = ei_edge (ei)->src;
4731 dest = ei_edge (ei)->dest;
4733 /* Check if the edge source has been visited yet. */
4734 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4736 /* Mark that we have visited the source. */
4737 SET_BIT (visited, src->index);
4739 if (EDGE_COUNT (src->preds) > 0)
4740 /* Since the SRC node has been visited for the first
4741 time, check its predecessors. */
4742 stack[sp++] = ei_start (src->preds);
4743 else
4744 post_order[post_order_num++] = src->index;
4746 else
4748 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4749 post_order[post_order_num++] = dest->index;
4751 if (!ei_one_before_end_p (ei))
4752 ei_next (&stack[sp - 1]);
4753 else
4754 sp--;
4758 if (include_entry_exit)
4759 post_order[post_order_num++] = ENTRY_BLOCK;
4761 free (stack);
4762 sbitmap_free (visited);
4763 return post_order_num;
4767 /* Initialize data structures used by PRE. */
4769 static void
4770 init_pre (bool do_fre)
4772 basic_block bb;
4774 next_expression_id = 1;
4775 expressions = NULL;
4776 VEC_safe_push (pre_expr, heap, expressions, NULL);
4777 value_expressions = VEC_alloc (bitmap, heap, get_max_value_id () + 1);
4778 VEC_safe_grow_cleared (bitmap, heap, value_expressions,
4779 get_max_value_id() + 1);
4780 name_to_id = NULL;
4782 in_fre = do_fre;
4784 inserted_exprs = BITMAP_ALLOC (NULL);
4786 connect_infinite_loops_to_exit ();
4787 memset (&pre_stats, 0, sizeof (pre_stats));
4790 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4791 my_rev_post_order_compute (postorder, false);
4793 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4795 calculate_dominance_info (CDI_POST_DOMINATORS);
4796 calculate_dominance_info (CDI_DOMINATORS);
4798 bitmap_obstack_initialize (&grand_bitmap_obstack);
4799 phi_translate_table.create (5110);
4800 expression_to_id.create (num_ssa_names * 3);
4801 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4802 sizeof (struct bitmap_set), 30);
4803 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4804 sizeof (struct pre_expr_d), 30);
4805 FOR_ALL_BB (bb)
4807 if (!do_fre)
4809 EXP_GEN (bb) = bitmap_set_new ();
4810 PHI_GEN (bb) = bitmap_set_new ();
4811 TMP_GEN (bb) = bitmap_set_new ();
4813 AVAIL_OUT (bb) = bitmap_set_new ();
4816 need_eh_cleanup = BITMAP_ALLOC (NULL);
4817 need_ab_cleanup = BITMAP_ALLOC (NULL);
4821 /* Deallocate data structures used by PRE. */
4823 static void
4824 fini_pre (bool do_fre)
4826 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4827 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4829 free (postorder);
4830 VEC_free (bitmap, heap, value_expressions);
4831 BITMAP_FREE (inserted_exprs);
4832 bitmap_obstack_release (&grand_bitmap_obstack);
4833 free_alloc_pool (bitmap_set_pool);
4834 free_alloc_pool (pre_expr_pool);
4835 phi_translate_table.dispose ();
4836 expression_to_id.dispose ();
4837 VEC_free (unsigned, heap, name_to_id);
4839 free_aux_for_blocks ();
4841 free_dominance_info (CDI_POST_DOMINATORS);
4843 if (do_eh_cleanup)
4844 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4846 if (do_ab_cleanup)
4847 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4849 BITMAP_FREE (need_eh_cleanup);
4850 BITMAP_FREE (need_ab_cleanup);
4852 if (do_eh_cleanup || do_ab_cleanup)
4853 cleanup_tree_cfg ();
4855 if (!do_fre)
4856 loop_optimizer_finalize ();
4859 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4860 only wants to do full redundancy elimination. */
4862 static unsigned int
4863 execute_pre (bool do_fre)
4865 unsigned int todo = 0;
4867 do_partial_partial =
4868 flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
4870 /* This has to happen before SCCVN runs because
4871 loop_optimizer_init may create new phis, etc. */
4872 if (!do_fre)
4873 loop_optimizer_init (LOOPS_NORMAL);
4875 if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
4877 if (!do_fre)
4878 loop_optimizer_finalize ();
4880 return 0;
4883 init_pre (do_fre);
4884 scev_initialize ();
4886 /* Collect and value number expressions computed in each basic block. */
4887 compute_avail ();
4889 if (dump_file && (dump_flags & TDF_DETAILS))
4891 basic_block bb;
4893 FOR_ALL_BB (bb)
4895 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4896 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4897 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4898 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4902 /* Insert can get quite slow on an incredibly large number of basic
4903 blocks due to some quadratic behavior. Until this behavior is
4904 fixed, don't run it when we have an incredibly large number of
4905 bb's. If we aren't going to run insert, there is no point in
4906 computing ANTIC, either, even though it's plenty fast. */
4907 if (!do_fre && n_basic_blocks < 4000)
4909 compute_antic ();
4910 insert ();
4913 /* Make sure to remove fake edges before committing our inserts.
4914 This ensures we don't end up with extra critical edges that
4915 we would need to split. */
4916 remove_fake_exit_edges ();
4917 gsi_commit_edge_inserts ();
4919 /* Remove all the redundant expressions. */
4920 todo |= eliminate ();
4922 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4923 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4924 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4925 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4926 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4928 clear_expression_ids ();
4929 if (!do_fre)
4931 remove_dead_inserted_code ();
4932 todo |= TODO_verify_flow;
4935 scev_finalize ();
4936 fini_pre (do_fre);
4938 if (!do_fre)
4939 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4940 case we can merge the block with the remaining predecessor of the block.
4941 It should either:
4942 - call merge_blocks after each tail merge iteration
4943 - call merge_blocks after all tail merge iterations
4944 - mark TODO_cleanup_cfg when necessary
4945 - share the cfg cleanup with fini_pre. */
4946 todo |= tail_merge_optimize (todo);
4947 free_scc_vn ();
4949 return todo;
4952 /* Gate and execute functions for PRE. */
4954 static unsigned int
4955 do_pre (void)
4957 return execute_pre (false);
4960 static bool
4961 gate_pre (void)
4963 return flag_tree_pre != 0;
4966 struct gimple_opt_pass pass_pre =
4969 GIMPLE_PASS,
4970 "pre", /* name */
4971 gate_pre, /* gate */
4972 do_pre, /* execute */
4973 NULL, /* sub */
4974 NULL, /* next */
4975 0, /* static_pass_number */
4976 TV_TREE_PRE, /* tv_id */
4977 PROP_no_crit_edges | PROP_cfg
4978 | PROP_ssa, /* properties_required */
4979 0, /* properties_provided */
4980 0, /* properties_destroyed */
4981 TODO_rebuild_alias, /* todo_flags_start */
4982 TODO_update_ssa_only_virtuals | TODO_ggc_collect
4983 | TODO_verify_ssa /* todo_flags_finish */
4988 /* Gate and execute functions for FRE. */
4990 static unsigned int
4991 execute_fre (void)
4993 return execute_pre (true);
4996 static bool
4997 gate_fre (void)
4999 return flag_tree_fre != 0;
5002 struct gimple_opt_pass pass_fre =
5005 GIMPLE_PASS,
5006 "fre", /* name */
5007 gate_fre, /* gate */
5008 execute_fre, /* execute */
5009 NULL, /* sub */
5010 NULL, /* next */
5011 0, /* static_pass_number */
5012 TV_TREE_FRE, /* tv_id */
5013 PROP_cfg | PROP_ssa, /* properties_required */
5014 0, /* properties_provided */
5015 0, /* properties_destroyed */
5016 0, /* todo_flags_start */
5017 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */