/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
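/* As a small illustration of the above (not from the original
   comment), in straight-line code like

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   both right-hand sides hash to the same expression and receive the
   same value number, making y_4 redundant with x_1.  For a cycle like

     # i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;

   the SCC {i_1, i_2} is iterated with the optimistic table until the
   value numbers stabilize, proving both i_1 and i_2 equal to 0.  */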
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED
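/* State used by the reference lookup machinery below: the kind of
   VUSE walk to perform and, when LAST_VUSE_PTR is non-NULL, a place
   to record the last VUSE the walk visited.  */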
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;
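/* Heads of the per-table chains of most recently inserted entries,
   used to unwind insertions again.  */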
static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;

/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};
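/* Entries are hashed and compared by the version of their SSA name.  */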
hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
/* Return the value numbering information for a given SSA name,
   creating it if it does not exist.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
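/* Note the encoding convention implied above: operands are pushed from
   the outermost handled component down to the base object, and bare
   decls are canonicalized to MEM_REF[&decl] so that lookups of
   MEM[ptr] with ptr = &decl can match.  */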
/* Build a alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  /* Make sure to not endlessly recurse.
	     See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
	     happen when we value-number a PHI to its backedge value.  */
	  || SSA_VAL (ptr) == op->op0
	  || !poly_int_tree_p (ptroff))
	return false;

      off += wi::to_poly_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* ??? Can end up with endless recursion here!?
     gcc.c-torture/execute/strcmp-1.c  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
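/* For example, a read from a string literal or from a const-qualified
   variable with a known initializer is folded to a constant here via
   fold_ctor_reference or the native encode/interpret routines.  */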
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
    {
    case 1:
      res = gimple_resimplify1 (NULL, res_op, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, res_op, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, res_op, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
basic_block vn_context_bb;

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */
1877 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1878 bool *disambiguate_only
)
1880 vn_reference_t vr
= (vn_reference_t
)vr_
;
1881 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1882 tree base
= ao_ref_base (ref
);
1883 HOST_WIDE_INT offseti
, maxsizei
;
1884 static vec
<vn_reference_op_s
> lhs_ops
;
1886 bool lhs_ref_ok
= false;
1887 poly_int64 copy_size
;
1889 /* First try to disambiguate after value-replacing in the definitions LHS. */
1890 if (is_gimple_assign (def_stmt
))
1892 tree lhs
= gimple_assign_lhs (def_stmt
);
1893 bool valueized_anything
= false;
1894 /* Avoid re-allocation overhead. */
1895 lhs_ops
.truncate (0);
1896 basic_block saved_rpo_bb
= vn_context_bb
;
1897 vn_context_bb
= gimple_bb (def_stmt
);
1898 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1899 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
, true);
1900 vn_context_bb
= saved_rpo_bb
;
1901 if (valueized_anything
)
1903 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1904 get_alias_set (lhs
),
1905 TREE_TYPE (lhs
), lhs_ops
);
1907 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1909 *disambiguate_only
= true;
1915 ao_ref_init (&lhs_ref
, lhs
);
1919 /* If we reach a clobbering statement try to skip it and see if
1920 we find a VN result with exactly the same value as the
1921 possible clobber. In this case we can ignore the clobber
1922 and return the found value.
1923 Note that we don't need to worry about partial overlapping
1924 accesses as we then can use TBAA to disambiguate against the
1925 clobbering statement when looking up a load (thus the
1926 VN_WALKREWRITE guard). */
1927 if (vn_walk_kind
== VN_WALKREWRITE
1928 && is_gimple_reg_type (TREE_TYPE (lhs
))
1929 && types_compatible_p (TREE_TYPE (lhs
), vr
->type
))
1931 tree
*saved_last_vuse_ptr
= last_vuse_ptr
;
1932 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1933 last_vuse_ptr
= NULL
;
1934 tree saved_vuse
= vr
->vuse
;
1935 hashval_t saved_hashcode
= vr
->hashcode
;
1936 void *res
= vn_reference_lookup_2 (ref
,
1937 gimple_vuse (def_stmt
), 0, vr
);
1938 /* Need to restore vr->vuse and vr->hashcode. */
1939 vr
->vuse
= saved_vuse
;
1940 vr
->hashcode
= saved_hashcode
;
1941 last_vuse_ptr
= saved_last_vuse_ptr
;
1942 if (res
&& res
!= (void *)-1)
1944 vn_reference_t vnresult
= (vn_reference_t
) res
;
1945 if (vnresult
->result
1946 && operand_equal_p (vnresult
->result
,
1947 gimple_assign_rhs1 (def_stmt
), 0))
1952 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1953 && gimple_call_num_args (def_stmt
) <= 4)
1955 /* For builtin calls valueize its arguments and call the
1956 alias oracle again. Valueization may improve points-to
1957 info of pointers and constify size and position arguments.
1958 Originally this was motivated by PR61034 which has
1959 conditional calls to free falsely clobbering ref because
1960 of imprecise points-to info of the argument. */
1962 bool valueized_anything
= false;
1963 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1965 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1966 tree val
= vn_valueize (oldargs
[i
]);
1967 if (val
!= oldargs
[i
])
1969 gimple_call_set_arg (def_stmt
, i
, val
);
1970 valueized_anything
= true;
1973 if (valueized_anything
)
1975 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1977 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1978 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1981 *disambiguate_only
= true;
1987 if (*disambiguate_only
)
1990 /* If we cannot constrain the size of the reference we cannot
1991 test if anything kills it. */
1992 if (!ref
->max_size_known_p ())
1995 poly_int64 offset
= ref
->offset
;
1996 poly_int64 maxsize
= ref
->max_size
;
1998 /* We can't deduce anything useful from clobbers. */
1999 if (gimple_clobber_p (def_stmt
))
2002 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2003 from that definition.
2005 if (is_gimple_reg_type (vr
->type
)
2006 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
2007 && (integer_zerop (gimple_call_arg (def_stmt
, 1))
2008 || ((TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
2009 || (INTEGRAL_TYPE_P (vr
->type
) && known_eq (ref
->size
, 8)))
2010 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
2011 && offset
.is_constant (&offseti
)
2012 && offseti
% BITS_PER_UNIT
== 0))
2013 && poly_int_tree_p (gimple_call_arg (def_stmt
, 2))
2014 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2015 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
))
2018 poly_int64 offset2
, size2
, maxsize2
;
2020 tree ref2
= gimple_call_arg (def_stmt
, 0);
2021 if (TREE_CODE (ref2
) == SSA_NAME
)
2023 ref2
= SSA_VAL (ref2
);
2024 if (TREE_CODE (ref2
) == SSA_NAME
2025 && (TREE_CODE (base
) != MEM_REF
2026 || TREE_OPERAND (base
, 0) != ref2
))
2028 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref2
);
2029 if (gimple_assign_single_p (def_stmt
)
2030 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2031 ref2
= gimple_assign_rhs1 (def_stmt
);
2034 if (TREE_CODE (ref2
) == ADDR_EXPR
)
2036 ref2
= TREE_OPERAND (ref2
, 0);
2037 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
2039 if (!known_size_p (maxsize2
)
2040 || !known_eq (maxsize2
, size2
)
2041 || !operand_equal_p (base
, base2
, OEP_ADDRESS_OF
))
2044 else if (TREE_CODE (ref2
) == SSA_NAME
)
2047 if (TREE_CODE (base
) != MEM_REF
2048 || !(mem_ref_offset (base
) << LOG2_BITS_PER_UNIT
).to_shwi (&soff
))
2052 if (TREE_OPERAND (base
, 0) != ref2
)
2054 gimple
*def
= SSA_NAME_DEF_STMT (ref2
);
2055 if (is_gimple_assign (def
)
2056 && gimple_assign_rhs_code (def
) == POINTER_PLUS_EXPR
2057 && gimple_assign_rhs1 (def
) == TREE_OPERAND (base
, 0)
2058 && poly_int_tree_p (gimple_assign_rhs2 (def
))
2059 && (wi::to_poly_offset (gimple_assign_rhs2 (def
))
2060 << LOG2_BITS_PER_UNIT
).to_shwi (&offset2
))
2062 ref2
= gimple_assign_rhs1 (def
);
2063 if (TREE_CODE (ref2
) == SSA_NAME
)
2064 ref2
= SSA_VAL (ref2
);
2072 tree len
= gimple_call_arg (def_stmt
, 2);
2073 if (known_subrange_p (offset
, maxsize
, offset2
,
2074 wi::to_poly_offset (len
) << LOG2_BITS_PER_UNIT
))
2077 if (integer_zerop (gimple_call_arg (def_stmt
, 1)))
2078 val
= build_zero_cst (vr
->type
);
2079 else if (INTEGRAL_TYPE_P (vr
->type
)
2080 && known_eq (ref
->size
, 8))
2082 gimple_match_op
res_op (gimple_match_cond::UNCOND
, NOP_EXPR
,
2083 vr
->type
, gimple_call_arg (def_stmt
, 1));
2084 val
= vn_nary_build_or_lookup (&res_op
);
2086 || (TREE_CODE (val
) == SSA_NAME
2087 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
2092 unsigned len
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr
->type
));
2093 unsigned char *buf
= XALLOCAVEC (unsigned char, len
);
2094 memset (buf
, TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 1)),
2096 val
= native_interpret_expr (vr
->type
, buf
, len
);
2100 return vn_reference_lookup_or_insert_for_pieces
2101 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2105 /* 2) Assignment from an empty CONSTRUCTOR. */
2106 else if (is_gimple_reg_type (vr
->type
)
2107 && gimple_assign_single_p (def_stmt
)
2108 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
2109 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
2112 poly_int64 offset2
, size2
, maxsize2
;
2114 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
2115 &offset2
, &size2
, &maxsize2
, &reverse
);
2116 if (known_size_p (maxsize2
)
2117 && operand_equal_p (base
, base2
, 0)
2118 && known_subrange_p (offset
, maxsize
, offset2
, size2
))
2120 tree val
= build_zero_cst (vr
->type
);
2121 return vn_reference_lookup_or_insert_for_pieces
2122 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
           && is_gimple_reg_type (vr->type)
           && !contains_storage_order_barrier_p (vr->operands)
           && gimple_assign_single_p (def_stmt)
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           /* native_encode and native_decode operate on arrays of bytes
              and so fundamentally need a compile-time size and offset.  */
           && maxsize.is_constant (&maxsizei)
           && maxsizei % BITS_PER_UNIT == 0
           && offset.is_constant (&offseti)
           && offseti % BITS_PER_UNIT == 0
           && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
               || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
                   && is_gimple_min_invariant
                        (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2;
      bool reverse;
      base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &reverse);
      if (base2
          && !reverse
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && known_subrange_p (offseti, maxsizei, offset2, size2))
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          tree rhs = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (rhs) == SSA_NAME)
            rhs = SSA_VAL (rhs);
          len = native_encode_expr (rhs,
                                    buffer, sizeof (buffer),
                                    (offseti - offset2) / BITS_PER_UNIT);
          if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
            {
              tree type = vr->type;
              /* Make sure to interpret in a type that has a range
                 covering the whole access size.  */
              if (INTEGRAL_TYPE_P (vr->type)
                  && maxsizei != TYPE_PRECISION (vr->type))
                type = build_nonstandard_integer_type (maxsizei,
                                                       TYPE_UNSIGNED (type));
              tree val = native_interpret_expr (type, buffer,
                                                maxsizei / BITS_PER_UNIT);
              /* If we chop off bits because the type's precision doesn't
                 match the memory access size this is ok when optimizing
                 reads but not when called from the DSE code during
                 elimination.  */
              if (val
                  && type != vr->type)
                {
                  if (! int_fits_type_p (val, vr->type))
                    val = NULL_TREE;
                  else
                    val = fold_convert (vr->type, val);
                }

              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }
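
  /* For example (illustrative sketch, not part of the original sources):
     the native encode/interpret path handles type-punned reads of
     constant stores,

       union { float f; unsigned u; } x;
       x.f = 1.0f;
       ... = x.u;   // value-numbered to 0x3f800000

     by encoding the stored value into a byte buffer and re-interpreting
     the read bytes in the type of the load.  */
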
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (known_eq (ref->size, maxsize)
           && is_gimple_reg_type (vr->type)
           && !contains_storage_order_barrier_p (vr->operands)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2,
                                       &reverse);
      if (!reverse
          && known_size_p (maxsize2)
          && known_eq (maxsize2, size2)
          && operand_equal_p (base, base2, 0)
          && known_subrange_p (offset, maxsize, offset2, size2)
          /* ??? We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (vr->type)
              || known_eq (ref->size, TYPE_PRECISION (vr->type)))
          && multiple_p (ref->size, BITS_PER_UNIT))
        {
          gimple_match_op op (gimple_match_cond::UNCOND,
                              BIT_FIELD_REF, vr->type,
                              vn_valueize (gimple_assign_rhs1 (def_stmt)),
                              bitsize_int (ref->size),
                              bitsize_int (offset - offset2));
          tree val = vn_nary_build_or_lookup (&op);
          if (val
              && (TREE_CODE (val) != SSA_NAME
                  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
            {
              vn_reference_t res
                = vn_reference_lookup_or_insert_for_pieces
                    (vuse, vr->set, vr->type, vr->operands, val);
              return res;
            }
        }
    }
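
  /* For example (illustrative sketch, not part of the original sources):
     storing a register value and re-reading a sub-piece of the stored
     location,

       mem = x_1;                 // 64-bit register-type store
       ... = mem.lo32;            // becomes BIT_FIELD_REF <x_1, 32, 0>

     lets the partial load reuse the already-computed value x_1.  */
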
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
          || (base != base2
              && (TREE_CODE (base) != MEM_REF
                  || TREE_CODE (base2) != MEM_REF
                  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
                  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
                                          TREE_OPERAND (base2, 1))))
          || !stmt_kills_ref_p (def_stmt, ref))
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && maybe_ne (lhs_ops[0].off, -1))
        {
          if (known_eq (lhs_ops[0].off, vr->operands[i].off))
            i--, j--;
          else if (vr->operands[i].opcode == MEM_REF
                   && maybe_ne (vr->operands[i].off, -1))
            {
              extra_off = vr->operands[i].off - lhs_ops[0].off;
              i--, j--;
            }
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
        {
          vro = &vr->operands[k];
          if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
            return (void *)-1;
        }

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
        {
          if (rhs.length () < 2)
            return (void *)-1;
          int ix = rhs.length () - 2;
          if (rhs[ix].opcode != MEM_REF
              || known_eq (rhs[ix].off, -1))
            return (void *)-1;
          rhs[ix].off += extra_off;
          rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
                                         build_int_cst (TREE_TYPE (rhs[ix].op0),
                                                        extra_off));
        }

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
        vr->operands.safe_grow (i + 1 + rhs.length ());
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
        shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
        return vn_reference_lookup_or_insert_for_pieces
                 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
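
  /* For example (illustrative sketch, not part of the original sources):
     for an aggregate copy that kills the looked-up reference,

       a = b;        // struct copy
       ... = a.x;    // continues as a lookup of b.x

     the reference operands are rewritten to be based on the copy's RHS
     and the walk continues with the adjusted reference.  */
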
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
          || !multiple_p (offset, BITS_PER_UNIT, &at)
          || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          lhs = vn_valueize (lhs);
          if (TREE_CODE (lhs) == SSA_NAME)
            {
              gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
              if (gimple_assign_single_p (def_stmt)
                  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
                lhs = gimple_assign_rhs1 (def_stmt);
            }
        }
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
            {
              lhs = TREE_OPERAND (tem, 0);
              if (TREE_CODE (lhs) == SSA_NAME)
                lhs = vn_valueize (lhs);
              lhs_offset += mem_offset;
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += mem_offset;
            }
          else if (DECL_P (tem)
                   || TREE_CODE (tem) == STRING_CST)
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
        {
          if (TREE_OPERAND (base, 0) != lhs
              || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
            return (void *)-1;
          at += mem_offset;
        }
      else if (!DECL_P (base)
               || TREE_CODE (lhs) != ADDR_EXPR
               || TREE_OPERAND (lhs, 0) != base)
        return (void *)-1;

      /* If the access is completely outside of the memcpy destination
         area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
        return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references)
            shared_lookup_references = vr->operands;
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
        return vn_reference_lookup_or_insert_for_pieces
                 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
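
  /* For example (illustrative sketch, not part of the original sources):
     the memcpy case performs the same translation through byte copies,

       memcpy (&a, &b, sizeof (a));
       ... = a.x;    // continues as a lookup of b.x

     provided the access is fully contained in the copied region.  */
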

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult
          = (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                    vn_reference_lookup_2,
                                                    vn_reference_lookup_3,
                                                    vuse_valueize, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      if (! tbaa_p)
        r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult
        = (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3,
                                                  vuse_valueize, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
                          vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}

/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
         when disentangling an irreducible region we may end up visiting
         a use before the corresponding def.  That's a missed optimization
         only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
          && !operand_equal_p ((*slot)->result, vr1->result, 0))
        {
          fprintf (dump_file, "Keeping old value ");
          print_generic_expr (dump_file, (*slot)->result);
          fprintf (dump_file, " because of collision\n");
        }
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
        && commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
           && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
           && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
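
/* For example (illustrative sketch, not part of the original sources):
   the canonicalization above makes commutative expressions hash and
   compare equal, so

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   end up with identical vn_nary_op_s contents and thus the same value
   number; comparisons are normalized similarly via
   swap_tree_comparison.  */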

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
         != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
                                  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
                        bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    {
      vno->hashcode = vn_nary_op_compute_hash (vno);
      gcc_assert (! vno->predicated_values
                  || (! vno->u.values->next
                      && vno->u.values->n == 1));
    }

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
         ??? Only if those are constant, otherwise, with constant predicated
         value, turn them into predicated values with entry-block validity
         (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
          && ! vno->predicated_values)
        {
          /* ??? We cannot remove *slot from the unwind stack list.
             For the moment we deal with this by skipping not found
             entries but this isn't ideal ...  */
          *slot = vno;
          /* ??? Maintain a stack of states we can unwind in
             vn_nary_op_s?  But how far do we unwind?  In reality
             we need to push change records somewhere...  Or not
             unwind vn_nary_op_s and linking them but instead
             unwind the results "list", linking that, which also
             doesn't move on hashtable resize.  */
          /* We can also have a ->unwind_to recording *slot there.
             That way we can make u.values a fixed size array with
             recording the number of entries but of course we then
             have always N copies for each unwind_to-state.  Or we
             make sure to only ever append and each unwinding will
             pop off one entry (but how to deal with predicated
             replaced with non-predicated here?)  */
          vno->next = last_inserted_nary;
          last_inserted_nary = vno;
          return vno;
        }
      else if (vno->predicated_values
               && ! (*slot)->predicated_values)
        return *slot;
      else if (vno->predicated_values
               && (*slot)->predicated_values)
        {
          /* ??? Factor this all into an insert_single_predicated_value
             routine.  */
          gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
          basic_block vno_bb
            = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
          vn_pval *nval = vno->u.values;
          vn_pval **next = &vno->u.values;
          bool found = false;
          for (vn_pval *val = (*slot)->u.values; val; val = val->next)
            {
              if (expressions_equal_p (val->result, vno->u.values->result))
                {
                  found = true;
                  for (unsigned i = 0; i < val->n; ++i)
                    {
                      basic_block val_bb
                        = BASIC_BLOCK_FOR_FN (cfun,
                                              val->valid_dominated_by_p[i]);
                      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
                        /* Value registered with more generic predicate.  */
                        return *slot;
                      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
                        /* Shouldn't happen, we insert in RPO order.  */
                        gcc_unreachable ();
                    }
                  /* Append the new predicate block to the existing value.  */
                  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                                     sizeof (vn_pval)
                                                     + val->n * sizeof (int));
                  (*next)->next = NULL;
                  (*next)->result = val->result;
                  (*next)->n = val->n + 1;
                  memcpy ((*next)->valid_dominated_by_p,
                          val->valid_dominated_by_p,
                          val->n * sizeof (int));
                  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
                  next = &(*next)->next;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Appending predicate to value.\n");
                  continue;
                }
              /* Copy other predicated values.  */
              *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                                 sizeof (vn_pval)
                                                 + (val->n - 1) * sizeof (int));
              memcpy (*next, val,
                      sizeof (vn_pval) + (val->n - 1) * sizeof (int));
              (*next)->next = NULL;
              next = &(*next)->next;
            }
          if (!found)
            *next = nval;

          *slot = vno;
          vno->next = last_inserted_nary;
          last_inserted_nary = vno;
          return vno;
        }

      /* While we do not want to insert things twice it's awkward to
         avoid it in the case where visit_nary_op pattern-matches stuff
         and ends up simplifying the replacement to itself.  We then
         get two inserts, one from visit_nary_op and one from
         vn_nary_build_or_lookup.
         So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
        return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
                                     tree type, tree *ops,
                                     tree result, unsigned int value_id,
                                     edge pred_e)
{
  /* ??? Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
        return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
        if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
          cnt++;
      if (cnt != 1)
        return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
               pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
               integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                              sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
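
/* For example (illustrative sketch, not part of the original sources):
   for

     if (a_1 == 0)
       goto bb3;   // bb3 has the true edge as single predecessor

   the comparison "a_1 == 0" is recorded as a predicated value that is
   true in blocks dominated by bb3, which
   vn_nary_op_get_predicated_value below consults when the same
   comparison is looked up there.  */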

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      if (dominated_by_p_w_unex (bb,
                                 BASIC_BLOCK_FOR_FN
                                   (cfun, val->valid_dominated_by_p[i])))
        return val->result;
  return NULL_TREE;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
                        ? vp1->block->index
                        : EDGE_COUNT (vp1->block->preds));
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, as they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
        continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
        continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}

/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
                    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
                      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
           && expressions_equal_p (rhs1, rhs2))
          || (commutative_tree_code (code1)
              && expressions_equal_p (lhs1, rhs2)
              && expressions_equal_p (rhs1, lhs2)));
}
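
/* For example (illustrative sketch, not part of the original sources):
   "a < b" and "b > a" are treated as the same condition, while
   "a >= b" is recognized as their inverse, in which case *INVERTED_P
   is set so the caller can swap the true/false edges accordingly.  */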

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
        return false;

      switch (EDGE_COUNT (vp1->block->preds))
        {
        case 1:
          /* Single-arg PHIs are just copies.  */
          break;

        case 2:
          {
            /* Rule out backedges into the PHI.  */
            if (vp1->block->loop_father->header == vp1->block
                || vp2->block->loop_father->header == vp2->block)
              return false;

            /* If the PHI nodes do not have compatible types
               they are not the same.  */
            if (!types_compatible_p (vp1->type, vp2->type))
              return false;

            basic_block idom1
              = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
            basic_block idom2
              = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
            /* If the immediate dominators end in switch stmts multiple
               values may end up in the same PHI arg via intermediate
               CFG merges.  */
            if (EDGE_COUNT (idom1->succs) != 2
                || EDGE_COUNT (idom2->succs) != 2)
              return false;

            /* Verify the controlling stmt is the same.  */
            gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
            gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
            if (! last1 || ! last2)
              return false;
            bool inverted_p;
            if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
                                      last2, vp2->cclhs, vp2->ccrhs,
                                      &inverted_p))
              return false;

            /* Get at true/false controlled edges into the PHI.  */
            edge te1, te2, fe1, fe2;
            if (! extract_true_false_controlled_edges (idom1, vp1->block,
                                                       &te1, &fe1)
                || ! extract_true_false_controlled_edges (idom2, vp2->block,
                                                          &te2, &fe2))
              return false;

            /* Swap edges if the second condition is the inverted of the
               first.  */
            if (inverted_p)
              std::swap (te2, fe2);

            /* ??? Handle VN_TOP specially.  */
            if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
                                       vp2->phiargs[te2->dest_idx])
                || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
                                          vp2->phiargs[fe2->dest_idx]))
              return false;

            return true;
          }

        default:
          return false;
        }
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
        continue;
      if (!expressions_equal_p (phi1op, phi2op))
        return false;
    }

  return true;
}

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
                    sizeof (struct vn_phi_s)
                    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ??? We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
                                           sizeof (vn_phi_s)
                                           + ((gimple_phi_num_args (phi) - 1)
                                              * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ??? We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}

/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if (e->flags & EDGE_EXECUTABLE)
          {
            if (prede)
              {
                prede = NULL;
                break;
              }
            prede = e;
          }
      if (! prede)
        return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
        return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
        if (succe)
          {
            succe = NULL;
            break;
          }
        succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
         If there is only one edge we can spare us the dominator
         check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
        {
          FOR_EACH_EDGE (e, ei, succe->dest->preds)
            if (e != succe
                && (e->flags & EDGE_EXECUTABLE))
              {
                succe = NULL;
                break;
              }
        }
      if (succe)
        {
          bb2 = succe->dest;

          /* Re-do the dominance check with changed bb2.  */
          if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
            return true;
        }
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
                       && ((TREE_CODE (to) == SSA_NAME
                            && (to == from || SSA_VAL (to) == to))
                           || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      else if (currval != VN_TOP
               && ! is_gimple_min_invariant (currval)
               && ! ssa_undefined_value_p (currval, false)
               && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-constant) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (constant)\n");
            }
          to = from;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate
         iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (currval, false)
           && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types
         operand_equal_p does not reliably detect ADDR_EXPRs as equal.
         We know we are only getting invariant gimple addresses here,
         so can use get_addr_base_and_unit_offset to do this
         comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Lookup a value for OP in type WIDE_TYPE where the value in the type
   of OP is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
                                       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
        {
          tem = gimple_assign_rhs1 (def);
          if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
            {
              if (TREE_CODE (tem) == SSA_NAME)
                tem = vn_valueize (tem);
              return tem;
            }
        }
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
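
/* For example (illustrative sketch, not part of the original sources):
   together with the CASE_CONVERT handling in visit_nary_op below, this
   lets

     short t_1 = a_2 + b_3;
     int x_4 = (int) t_1;

   reuse an already available "int"-typed addition of the widened a_2
   and b_3 instead of keeping the narrow computation plus extension.  */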

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
         operation.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          /* We only handle sign-changes or zero-extension -> & mask.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
               && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
              || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
        {
          gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (def
              && (gimple_assign_rhs_code (def) == PLUS_EXPR
                  || gimple_assign_rhs_code (def) == MINUS_EXPR
                  || gimple_assign_rhs_code (def) == MULT_EXPR))
            {
              tree ops[3] = {};
              /* Either we have the op widened available.  */
              ops[0] = valueized_wider_op (type,
                                           gimple_assign_rhs1 (def));
              if (ops[0])
                ops[1] = valueized_wider_op (type,
                                             gimple_assign_rhs2 (def));
              if (ops[0] && ops[1])
                {
                  ops[0] = vn_nary_op_lookup_pieces
                      (2, gimple_assign_rhs_code (def), type, ops, NULL);
                  /* We have wider operation available.  */
                  if (ops[0])
                    {
                      unsigned lhs_prec = TYPE_PRECISION (type);
                      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
                      if (lhs_prec == rhs_prec)
                        {
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    NOP_EXPR, type, ops[0]);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                      else
                        {
                          tree mask = wide_int_to_tree
                            (type, wi::mask (rhs_prec, false, lhs_prec));
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    BIT_AND_EXPR,
                                                    TREE_TYPE (lhs),
                                                    ops[0], mask);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value numbered an indirect call's function to
             one not clobbering memory value number its VDEF to its
             VUSE.  */
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                          INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      gimple_match_op res_op (gimple_match_cond::UNCOND,
                              VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      result = vn_nary_build_or_lookup (&res_op);
      /* When building the conversion fails avoid inserting the reference
         again.  */
      if (!result)
        return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
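
/* For example (illustrative sketch, not part of the original sources):
   the VIEW_CONVERT_EXPR handling above covers union type-punning,

     u.i = 42;
     f_1 = u.f;   // same bytes re-read with a different type

   where the load is matched by offset and size and the result wrapped
   as VIEW_CONVERT_EXPR <float> (42).  */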

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
                           || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads
             we cannot value-number the VDEFs the same.  */
          alias_set_type set = get_alias_set (lhs);
          if (vnresult->set != set
              && ! alias_set_subset_of (set, vnresult->set))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->visited = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
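
/* For example (illustrative sketch, not part of the original sources):
   the "resultsame" path above makes the second store in

     *p = 5;
     ...          // no intervening clobber of *p
     *p = 5;

   produce no new memory state: its VDEF is value-numbered to its VUSE,
   which later allows the redundant store to be removed.  */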
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        ++n_executable;
        if (TREE_CODE (def) == SSA_NAME)
          {
            if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
              def = SSA_VAL (def);
            if (e->flags & EDGE_DFS_BACK)
              backedge_val = def;
          }
        if (!(e->flags & EDGE_DFS_BACK))
          seen_non_backedge = true;
        if (def == VN_TOP)
          ;
        /* Ignore undefined defs for sameval but record one.  */
        else if (TREE_CODE (def) == SSA_NAME
                 && ! virtual_operand_p (def)
                 && ssa_undefined_value_p (def, false))
          seen_undef = def;
        else if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            /* We know we're arriving only with invariant addresses here,
               try harder comparing them.  We can do some caching here
               which we cannot do in expressions_equal_p.  */
            if (TREE_CODE (def) == ADDR_EXPR
                && TREE_CODE (sameval) == ADDR_EXPR
                && sameval_base != (void *)-1)
              {
                if (!sameval_base)
                  sameval_base = get_addr_base_and_unit_offset
                                   (TREE_OPERAND (sameval, 0), &soff);
                if (!sameval_base)
                  sameval_base = (tree)(void *)-1;
                else if ((get_addr_base_and_unit_offset
                            (TREE_OPERAND (def, 0), &doff) == sameval_base)
                         && known_eq (soff, doff))
                  continue;
              }
            sameval = NULL_TREE;
            break;
          }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  if (backedge_val
      && !seen_non_backedge
      && TREE_CODE (backedge_val) == SSA_NAME
      && sameval == backedge_val
      && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
          || SSA_VAL (backedge_val) != backedge_val))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
          && TREE_CODE (result) == SSA_NAME
          && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
        {
          gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Marking CSEd to PHI node ");
              print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
                                 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
           && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
         we mess up equivalences otherwise as we are only comparing
         the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
        *inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
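/* Expository note: installing vn_lookup_simplify_result as mprts_hook
   means intermediate results produced by the match-and-simplify
   machinery are looked up in the VN tables, so folding can reuse
   expressions that already have a value number instead of
   materializing new statements.  */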
/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified);
                  fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              ((gimple_call_flags (call_stmt) | extra_fnflags)
               & (ECF_PURE | ECF_CONST))
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct are encoded
                     in the IL.  */
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);

 done:
  return changed;
}
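/* Expository example for the call handling above: two calls
   t_1 = f (a_2); ... t_3 = f (a_2); with f pure and the same
   incoming vuse receive the same value number, so the second call is
   redundant.  Without ECF_PURE/ECF_CONST this is only safe under the
   vdef/vuse conditions spelled out in the condition above.  */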
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
                                            bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
        return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
        avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
        pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
                                        gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
          && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
          && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
          && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
              || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
                        TREE_TYPE (val), leader,
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
                        TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
                        TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
         and thus we can end up simplifying the inserted expression
         at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
         res now has two values.  That doesn't play well with how
         we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (TREE_CODE (res) == SSA_NAME)
            res = eliminate_avail (bb, res);
          if (res)
            {
              fprintf (dump_file, "Failed to insert expression for value ");
              print_generic_expr (dump_file, val);
              fprintf (dump_file, " which is really fully redundant to ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, "\n");
            }
        }

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  VN_INFO (res)->valnum = val;
  VN_INFO (res)->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
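/* Expository example: if VAL was recorded by SCCVN as the single
   conversion  val = (T) x_1  and x_1 has an available leader l_2, the
   code above builds  tmp = (T) l_2, inserts it before *GSI and
   returns tmp as the new leader for VAL.  */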
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
         it is the RHS of an assignment.  Do replace local register
         variables since gcc does not guarantee a local variable will
         be allocated in register.
         ???  The fix isn't effective here.  This should instead
         be ensured by not value-numbering them the same but treating
         them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
           && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
               && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
               && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
        {
          /* If there is no existing usable leader but SCCVN thinks
             it has an expression it wants to use as replacement,
             insert that.  */
          tree val = VN_INFO (lhs)->valnum;
          if (val != VN_TOP
              && TREE_CODE (val) == SSA_NAME
              && VN_INFO (val)->needs_insertion
              && VN_INFO (val)->expr != NULL
              && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
            eliminate_push_avail (b, sprime);
        }

      /* If this now constitutes a copy duplicate points-to
         and range info appropriately.  This is especially
         important for inserted code.  See tree-ssa-copy.c
         for similar code.  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME)
        {
          basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
          if (POINTER_TYPE_P (TREE_TYPE (lhs))
              && SSA_NAME_PTR_INFO (lhs)
              && ! SSA_NAME_PTR_INFO (sprime))
            {
              duplicate_ssa_name_ptr_info (sprime,
                                           SSA_NAME_PTR_INFO (lhs));
              if (b != sprime_b)
                mark_ptr_info_alignment_unknown
                    (SSA_NAME_PTR_INFO (sprime));
            }
          else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                   && SSA_NAME_RANGE_INFO (lhs)
                   && ! SSA_NAME_RANGE_INFO (sprime)
                   && b == sprime_b)
            duplicate_ssa_name_range_info (sprime,
                                           SSA_NAME_RANGE_TYPE (lhs),
                                           SSA_NAME_RANGE_INFO (lhs));
        }

      /* Inhibit the use of an inserted PHI on a loop header when
         the address of the memory reference is a simple induction
         variable.  In other cases the vectorizer won't do anything
         anyway (either it's loop invariant or a complicated
         expression).  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME
          && do_pre
          && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
          && loop_outer (b->loop_father)
          && has_zero_uses (sprime)
          && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
          && gimple_assign_load_p (stmt))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
          basic_block def_bb = gimple_bb (def_stmt);
          if (gimple_code (def_stmt) == GIMPLE_PHI
              && def_bb->loop_father->header == def_bb)
            {
              loop_p loop = def_bb->loop_father;
              ssa_op_iter iter;
              tree op;
              bool found = false;
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                {
                  affine_iv iv;
                  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                  if (def_bb
                      && flow_bb_inside_loop_p (loop, def_bb)
                      && simple_iv (loop, loop, op, &iv, true))
                    {
                      found = true;
                      break;
                    }
                }
              if (found)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Not replacing ");
                      print_gimple_expr (dump_file, stmt, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime);
                      fprintf (dump_file, " which would add a loop"
                               " carried dependence to loop %d\n",
                               loop->num);
                    }
                  /* Don't keep sprime available.  */
                  sprime = NULL_TREE;
                }
            }
        }

      if (sprime)
        {
          /* If we can propagate the value computed for LHS into
             all uses don't bother doing anything with this stmt.  */
          if (may_propagate_copy (lhs, sprime))
            {
              /* Mark it for removal.  */
              to_remove.safe_push (stmt);

              /* ???  Don't count copy/constant propagations.  */
              if (gimple_assign_single_p (stmt)
                  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                      || gimple_assign_rhs1 (stmt) == sprime))
                return;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime);
                  fprintf (dump_file, " in all uses of ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              eliminations++;
              return;
            }

          /* If this is an assignment from our leader (which
             happens in the case the value-number is a constant)
             then there is nothing to do.  */
          if (gimple_assign_single_p (stmt)
              && sprime == gimple_assign_rhs1 (stmt))
            return;

          /* Else replace its RHS.  */
          bool can_make_abnormal_goto
            = is_gimple_call (stmt)
              && stmt_can_make_abnormal_goto (stmt);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced ");
              print_gimple_expr (dump_file, stmt, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, " in ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          eliminations++;

          gimple *orig_stmt = stmt;
          if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                          TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (lhs), sprime);
          tree vdef = gimple_vdef (stmt);
          tree vuse = gimple_vuse (stmt);
          propagate_tree_value_into_stmt (gsi, sprime);
          stmt = gsi_stmt (*gsi);
          update_stmt (stmt);
          /* In case the VDEF on the original stmt was released, value-number
             it to the VUSE.  This is to make vuse_ssa_val able to skip
             released virtual operands.  */
          if (vdef != gimple_vdef (stmt))
            {
              gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
              VN_INFO (vdef)->valnum = vuse;
            }

          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }

          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }

          return;
        }
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree val;
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
                                 &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = VN_INFO (rhs)->valnum;
      if (val
          && operand_equal_p (val, rhs, 0))
        {
          /* We can only remove the later store if the former aliases
             at least all accesses the later one does or if the store
             was to readonly memory storing the same value.  */
          alias_set_type set = get_alias_set (lhs);
          if (! vnresult
              || vnresult->set == set
              || alias_set_subset_of (set, vnresult->set))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              /* Queue stmt for removal.  */
              to_remove.safe_push (stmt);
              return;
            }
        }
    }

  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
          ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unexecutable edge from ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
              == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
            gimple_cond_make_true (cond);
          else
            gimple_cond_make_false (cond);
          update_stmt (cond);
          el_todo |= TODO_cleanup_cfg;
          return;
        }
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
                       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
        continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
        /* ???  For default defs BB shouldn't matter, but we have to
           solve the inconsistency between rpo eliminate and
           dom eliminate avail valueization first.  */
        sprime = eliminate_avail (b, use);
      else
        /* Look for sth available at the definition block of the argument.
           This avoids inconsistencies between availability there which
           decides if the stmt can be removed and availability at the
           use site.  The SSA property ensures that things available
           at the definition are also available at uses.  */
        sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
          && may_propagate_copy (use, sprime)
          /* We substitute into debug stmts to avoid excessive
             debug temporaries created by removed stmts, but we need
             to avoid doing so for inserted sprimes as we never want
             to create debug temporaries for them.  */
          && (!inserted_exprs
              || TREE_CODE (sprime) != SSA_NAME
              || !is_gimple_debug (stmt)
              || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
        {
          propagate_value (use_p, sprime);
          modified = true;
        }
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
         invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
        {
          /* fold_stmt may have created new stmts inbetween
             the previous stmt and the folded stmt.  Mark
             all defs created there as varying to not confuse
             the SCCVN machinery as we're using that even during
             elimination.  */
          if (gsi_end_p (prev))
            prev = gsi_start_bb (b);
          else
            gsi_next (&prev);
          if (gsi_stmt (prev) != gsi_stmt (*gsi))
            do
              {
                tree def;
                ssa_op_iter dit;
                FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
                                           dit, SSA_OP_ALL_DEFS)
                  /* As existing DEFs may move between stmts
                     only process new ones.  */
                  if (! has_VN_INFO (def))
                    {
                      VN_INFO (def)->valnum = def;
                      VN_INFO (def)->visited = true;
                    }
                if (gsi_stmt (prev) == gsi_stmt (*gsi))
                  break;
                gsi_next (&prev);
              }
            while (1);
        }
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
        to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
          && flag_devirtualize
          && virtual_method_call_p (fn))
        {
          tree otr_type = obj_type_ref_class (fn);
          unsigned HOST_WIDE_INT otr_tok
            = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
          tree instance;
          ipa_polymorphic_call_context context (current_function_decl,
                                                fn, stmt, &instance);
          context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                    otr_type, stmt);
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                 otr_tok, context, &final);
          if (dump_file)
            dump_possible_polymorphic_call_targets (dump_file,
                                                    obj_type_ref_class (fn),
                                                    otr_tok, context);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              if (targets.length () == 1)
                fn = targets[0]->decl;
              else
                fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                   "converting indirect call to "
                                   "function %s\n",
                                   lang_hooks.decl_printable_name (fn, 2));
                }
              gimple_call_set_fndecl (call_stmt, fn);
              /* If changing the call to __builtin_unreachable
                 or similar noreturn function, adjust gimple_call_fntype
                 too.  */
              if (gimple_call_noreturn_p (call_stmt)
                  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                  && TYPE_ARG_TYPES (TREE_TYPE (fn))
                  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                      == void_type_node))
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
              maybe_remove_unused_call_args (cfun, call_stmt);
              modified = true;
            }
        }
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
         is needed to fix up the noreturn call.  */
      if (!was_noreturn
          && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
        to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
         edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
           && (gimple_cond_true_p (as_a <gcond *> (stmt))
               || gimple_cond_false_p (as_a <gcond *> (stmt))))
          || (gimple_code (stmt) == GIMPLE_SWITCH
              && TREE_CODE (gimple_switch_index
                              (as_a <gswitch *> (stmt))) == INTEGER_CST))
        el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
         its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed EH side-effects.\n");
        }
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
          && !stmt_can_make_abnormal_goto (stmt))
        {
          bitmap_set_bit (need_ab_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed AB side-effects.\n");
        }
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
         it to the VUSE.  This is to make vuse_ssa_val able to skip
         released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
        VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
        {
          gsi_next (&gsi);
          continue;
        }

      tree sprime = eliminate_avail (b, res);
      if (sprime
          && sprime != res)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, "\n");
            }

          /* If we inserted this PHI node ourself, it's not an elimination.  */
          if (! inserted_exprs
              || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            eliminations++;

          /* If we will propagate into all uses don't bother to do
             anything.  */
          if (may_propagate_copy (res, sprime))
            {
              /* Mark the PHI for removal.  */
              to_remove.safe_push (phi);
              gsi_next (&gsi);
              continue;
            }

          remove_phi_node (&gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          gimple *stmt = gimple_build_assign (res, sprime);
          gimple_stmt_iterator gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          continue;
        }

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (b, arg);
          if (sprime && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
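/* Expository note: eliminate_push_avail pushes the shadowed previous
   leader (or the new leader itself if there was none).  Hence if the
   popped entry still equals the current leader it was the first one
   for its value and the slot is cleared; otherwise the popped entry
   is the shadowed leader and is restored.  */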
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
         be present so we have to make sure to deal with uses outside of the
         region of stmts that we thought are eliminated.
         ??? Note we may be confused by uses in dead regions we didn't run
         elimination on.  Rather than checking individual uses we accept
         dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
         contains such example).  */
      if (region_p)
        {
          if (gphi *phi = dyn_cast <gphi *> (stmt))
            {
              tree lhs = gimple_phi_result (phi);
              if (!has_zero_uses (lhs))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Keeping eliminated stmt live "
                             "as copy because of out-of-region uses\n");
                  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                  gimple *copy = gimple_build_assign (lhs, sprime);
                  gimple_stmt_iterator gsi
                    = gsi_after_labels (gimple_bb (stmt));
                  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                  do_release_defs = false;
                }
            }
          else if (tree lhs = gimple_get_lhs (stmt))
            if (TREE_CODE (lhs) == SSA_NAME
                && !has_zero_uses (lhs))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Keeping eliminated stmt live "
                           "as copy because of out-of-region uses\n");
                tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                if (is_gimple_assign (stmt))
                  {
                    gimple_assign_set_rhs_from_tree (&gsi, sprime);
                    stmt = gsi_stmt (gsi);
                    update_stmt (stmt);
                    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                    continue;
                  }
                else
                  {
                    gimple *copy = gimple_build_assign (lhs, sprime);
                    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                    do_release_defs = false;
                  }
              }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, do_release_defs);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (need_ab_cleanup, bb->index);
          if (do_release_defs)
            release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  walker.walk (cfun->cfg->x_entry_block_ptr);
  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate);

void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
          || info->valnum == VN_TOP)
        info->valnum = name;
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          if (VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name));
              fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
            }
        }
    }
}

/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim (basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
  ~rpo_elim ();

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;
  /* Instead of having a local availability lattice for each
     basic-block and availability at X defined as union of
     the local availabilities at X and its dominators we're
     turning this upside down and track availability per
     value given values are usually made available at very
     few points (at least one).
     So we have a value -> vec<location, leader> map where
     LOCATION is specifying the basic-block LEADER is made
     available for VALUE.  We push to this vector in RPO
     order thus for iteration we can simply pop the last
     entries.
     LOCATION is the basic-block index and LEADER is its
     SSA name version.  */
  /* ???  We'd like to use auto_vec here with embedded storage
     but that doesn't play well until we can provide move
     constructors and use std::move on hash-table expansion.
     So for now this is a bit more expensive than necessary.
     We eventually want to switch to a chaining scheme like
     for hashtable entries for unwinding which would make
     making the vector part of the vn_ssa_aux structure possible.  */
  typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
  rpo_avail_t m_rpo_avail;
};

/* Global RPO state for access from hooks.  */
static rpo_elim *rpo_avail;
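/* Expository example: if value v is made available as x_5 in BB3 and
   again as x_9 in BB7 (visited later in RPO), m_rpo_avail maps v to
   the vector [(3, 5), (7, 9)].  Lookup scans that vector from the
   back for an entry whose block dominates the block the leader is
   asked for.  */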
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
         and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
        ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
                                       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
rpo_elim::~rpo_elim ()
{
  /* Release the avail vectors.  */
  for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
       i != m_rpo_avail.end (); ++i)
    (*i).second.release ();
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
      if (!av || av->is_empty ())
        return NULL_TREE;
      int i = av->length () - 1;
      if ((*av)[i].first == bb->index)
        /* On tramp3d 90% of the cases are here.  */
        return ssa_name ((*av)[i].second);
      do
        {
          basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
          /* ???  During elimination we have to use availability at the
             definition site of a use we try to replace.  This
             is required to not run into inconsistencies because
             of dominated_by_p_w_unex behavior and removing a definition
             while not replacing all uses.
             ???  We could try to consistently walk dominators
             ignoring non-executable regions.  The nearest common
             dominator of bb and abb is where we can stop walking.  We
             may also be able to "pre-compute" (bits of) the next immediate
             (non-)dominator during the RPO walk when marking edges as
             executable.  */
          if (dominated_by_p_w_unex (bb, abb))
            {
              tree leader = ssa_name ((*av)[i].second);
              /* Prevent eliminations that break loop-closed SSA.  */
              if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
                  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
                  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
                                                           (leader))->loop_father,
                                              bb))
                return NULL_TREE;
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  print_generic_expr (dump_file, leader);
                  fprintf (dump_file, " is available for ");
                  print_generic_expr (dump_file, valnum);
                  fprintf (dump_file, "\n");
                }
              /* On tramp3d 99% of the _remaining_ cases succeed at
                 the first entry.  */
              return leader;
            }
          /* ???  Can we somehow skip to the immediate dominator
             RPO index (bb_to_rpo)?  Again, maybe not worth, on
             tramp3d the worst number of elements in the vector is 9.  */
        }
      while (--i >= 0);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP)
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  bool existed;
  vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
  if (!existed)
    {
      new (&av) vec<std::pair<int, int> >;
      av = vNULL;
      av.reserve_exact (2);
    }
  av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
}
/* Valueization hook for RPO VN plus required state.  */

static tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
        {
          tree tem = val->valnum;
          if (tem != VN_TOP && tem != name)
            {
              if (TREE_CODE (tem) != SSA_NAME)
                return tem;
              /* For all values we only valueize to an available leader
                 which means we can use SSA name info without restriction.  */
              tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
              if (tem)
                return tem;
            }
        }
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
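/* Expository example: for  if (a_1 < b_2)  that does not resolve, the
   caller records  a_1 < b_2 == true  on the true edge and the calls
   above additionally record  a_1 != b_2 == true, a_1 <= b_2 == true,
   a_1 > b_2 == false  and  a_1 == b_2 == false, so dominated
   redundant tests simplify to constants.  */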
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
            bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
            bool do_region, bitmap exit_bbs)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
          && flow_loop_nested_p (e->dest->loop_father,
                                 e->src->loop_father))
        {
          lc_phi_nodes = true;
          break;
        }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      vn_ssa_aux_t res_info = VN_INFO (res);
      if (!bb_visited)
        {
          gcc_assert (!res_info->visited);
          res_info->valnum = VN_TOP;
          res_info->visited = true;
        }

      /* When not iterating force backedge values to varying.  */
      visit_stmt (phi, !iterate_phis);
      if (virtual_operand_p (res))
        continue;

      /* Eliminate.  */
      /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
         how we handle backedges and availability.
         And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
      tree val = res_info->valnum;
      if (res != val && !iterate && eliminate)
        {
          if (tree leader = avail.eliminate_avail (bb, res))
            if (leader != res
                /* Preserve loop-closed SSA form.  */
                && (! lc_phi_nodes
                    || is_gimple_min_invariant (leader)))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fprintf (dump_file, "Replaced redundant PHI node "
                             "defining ");
                    print_generic_expr (dump_file, res);
                    fprintf (dump_file, " with ");
                    print_generic_expr (dump_file, leader);
                    fprintf (dump_file, "\n");
                  }
                avail.eliminations++;

                if (may_propagate_copy (res, leader))
                  {
                    /* Schedule for removal.  */
                    avail.to_remove.safe_push (phi);
                    continue;
                  }
                /* ???  Else generate a copy stmt.  */
              }
        }
      /* Only make defs available that not already are.  But make
         sure loop-closed SSA PHI node defs are picked up for
         downstream uses.  */
      if (lc_phi_nodes
          || res == val
          || ! avail.eliminate_avail (bb, res))
        avail.eliminate_push_avail (bb, res);
    }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking outgoing edge %d -> %d executable\n",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
        {
          FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
            {
              vn_ssa_aux_t op_info = VN_INFO (op);
              gcc_assert (!op_info->visited);
              op_info->valnum = VN_TOP;
              op_info->visited = true;
            }

          /* We somehow have to deal with uses that are not defined
             in the processed region.  Forcing unvisited uses to
             varying here doesn't play well with def-use following during
             expression simplification, so we deal with this by checking
             the visited flag in SSA_VAL.  */
        }

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
        {
        case GIMPLE_SWITCH:
          e = find_taken_edge (bb, vn_valueize (gimple_switch_index
                                                  (as_a <gswitch *> (last))));
          break;
        case GIMPLE_COND:
          {
            tree lhs = vn_valueize (gimple_cond_lhs (last));
            tree rhs = vn_valueize (gimple_cond_rhs (last));
            tree val = gimple_simplify (gimple_cond_code (last),
                                        boolean_type_node, lhs, rhs,
                                        NULL, vn_valueize);
            /* If the condition didn't simplify see if we have recorded
               an expression from edges taken so far.  */
            if (! val || TREE_CODE (val) != INTEGER_CST)
              {
                vn_nary_op_t vnresult;
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
                                                boolean_type_node, ops,
                                                &vnresult);
                /* Did we get a predicated value?  */
                if (! val && vnresult && vnresult->predicated_values)
                  {
                    val = vn_nary_op_get_predicated_value (vnresult, bb);
                    if (val && dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Got predicated value ");
                        print_generic_expr (dump_file, val, TDF_NONE);
                        fprintf (dump_file, " for ");
                        print_gimple_stmt (dump_file, last, TDF_SLIM);
                      }
                  }
              }
            if (val)
              e = find_taken_edge (bb, val);
            if (! e)
              {
                /* If we didn't manage to compute the taken edge then
                   push predicated expressions for the condition itself
                   and related conditions to the hashtables.  This allows
                   simplification of redundant conditions which is
                   important as early cleanup.  */
                edge true_e, false_e;
                extract_true_false_edges_from_block (bb, &true_e, &false_e);
                enum tree_code code = gimple_cond_code (last);
                enum tree_code icode
                  = invert_tree_comparison (code, HONOR_NANS (lhs));
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                if (do_region
                    && bitmap_bit_p (exit_bbs, true_e->dest->index))
                  true_e = NULL;
                if (do_region
                    && bitmap_bit_p (exit_bbs, false_e->dest->index))
                  false_e = NULL;
                if (true_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_true_node, 0, true_e);
                if (false_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_false_node, 0, false_e);
                if (icode != ERROR_MARK)
                  {
                    if (true_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_false_node, 0, true_e);
                    if (false_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_true_node, 0, false_e);
                  }
                /* Relax for non-integers, inverted condition handled
                   above.  */
                if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
                  {
                    if (true_e)
                      insert_related_predicates_on_edge (code, ops, true_e);
                    if (false_e)
                      insert_related_predicates_on_edge (icode, ops, false_e);
                  }
              }
            break;
          }
        case GIMPLE_GOTO:
          e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
          break;
        default:
          e = NULL;
        }
      if (e)
        {
          todo = TODO_cleanup_cfg;
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking known outgoing %sedge %d -> %d executable\n",
                         e->flags & EDGE_DFS_BACK ? "back-" : "",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
      else if (gsi_one_before_end_p (gsi))
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (!(e->flags & EDGE_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking outgoing edge %d -> %d executable\n",
                             e->src->index, e->dest->index);
                  e->flags |= EDGE_EXECUTABLE;
                  e->dest->flags |= BB_EXECUTABLE;
                }
              else if (!(e->dest->flags & BB_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking destination block %d reachable\n",
                             e->dest->index);
                  e->dest->flags |= BB_EXECUTABLE;
                }
            }
        }

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
        avail.eliminate_stmt (bb, &gsi);
      else
        /* If not eliminating, make all not already available defs
           available.  */
        FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
          if (! avail.eliminate_avail (bb, op))
            avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime;
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              sprime = SSA_VAL (arg);
              gcc_assert (TREE_CODE (sprime) != SSA_NAME
                          || SSA_NAME_IS_DEFAULT_DEF (sprime));
            }
          else
            /* Look for sth available at the definition block of the argument.
               This avoids inconsistencies between availability there which
               decides if the stmt can be removed and availability at the
               use site.  The SSA property ensures that things available
               at the definition are also available at uses.  */
            sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
                                            arg);
          if (sprime
              && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
        (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
        *slot = (*slot)->unwind_to;
      else
        valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
        (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
        (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);
  /* Prune [rpo_idx, ] from avail.  */
  /* ??? This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      while (! (*i).second.is_empty ())
        {
          if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
            break;
          (*i).second.pop ();
        }
    }
}
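/* Note each m_rpo_avail entry is a stack of (block index, leader)
   pairs pushed in RPO order, so popping until the top's RPO index
   drops below RPO_IDX exactly undoes the availability recorded in the
   blocks that are going to be re-visited.  */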
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);

  if (!do_region)
    BITMAP_FREE (exit_bbs);
  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags |= bb_in_region;
        }
      /* We can't merge the first two loops because we cannot rely
         on EDGE_DFS_BACK for edges not within the region.  But if
         we decide to always have the bb_in_region flag we can
         do the checking during the RPO walk itself (but then it's
         also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (e == entry || (e->src->flags & bb_in_region));
        }
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags &= ~bb_in_region;
        }
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
                          / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;
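  /* Note for a whole-function walk N equals the number of non-fixed
     blocks, so REGION_SIZE degenerates to num_ssa_names; for a region
     it scales that down by the fraction of blocks in the region.  */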
  /* Initialize the unwind state and edge/BB executable state.  */
  bool need_max_rpo_iterate = false;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->flags & EDGE_DFS_BACK)
            has_backedges = true;
          e->flags &= ~EDGE_EXECUTABLE;
          if (iterate || e == entry)
            continue;
          if (bb_to_rpo[e->src->index] > i)
            {
              rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
                                          bb_to_rpo[e->src->index]);
              need_max_rpo_iterate = true;
            }
          else
            rpo_state[i].max_rpo
              = MAX (rpo_state[i].max_rpo,
                     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
        }
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
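  /* At this point max_rpo of a block approximates the largest RPO
     index it can be reached from; the non-iterating walk below uses it
     to decide when the executable state of a not yet visited
     predecessor edge cannot be trusted.  */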
  /* When there are irreducible regions the simplistic max_rpo computation
     above for the case of backedges doesn't work and we need to iterate
     until there are no more changes.  */
  unsigned nit = 0;
  while (need_max_rpo_iterate)
    {
      nit++;
      need_max_rpo_iterate = false;
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            {
              if (e == entry)
                continue;
              int max_rpo = MAX (rpo_state[i].max_rpo,
                                 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
              if (rpo_state[i].max_rpo != max_rpo)
                {
                  rpo_state[i].max_rpo = max_rpo;
                  need_max_rpo_iterate = true;
                }
            }
        }
    }
  statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    e->flags |= EDGE_EXECUTABLE;
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index, loop->num);
                        non_latch_backedge = true;
                      }
                  }
              rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
            }
    }
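  /* The effect is that headers of loops nested deeper than MAX_DEPTH
     get their backedges pre-marked executable and are not used as
     iteration points, so their incoming backedge PHI values are
     treated as varying instead of optimistically iterated.  */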
  /* Go and process all blocks, iterating as necessary.  */
  uint64_t nblk = 0;
  if (iterate)
    {
      int idx = 0;
      do
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

          /* If the block has incoming backedges remember unwind state.  This
             is required even for non-executable blocks since in irreducible
             regions we might reach them via the backedge and re-start
             iterating from there.
             Note we can individually mark blocks with incoming backedges to
             not iterate where we then handle PHIs conservatively.  We do that
             heuristically to reduce compile-time for degenerate cases.  */
          if (rpo_state[idx].iterate)
            {
              rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
              rpo_state[idx].ref_top = last_inserted_ref;
              rpo_state[idx].phi_top = last_inserted_phi;
              rpo_state[idx].nary_top = last_inserted_nary;
            }
          if (!(bb->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Block %d: BB%d found not executable\n",
                         idx, bb->index);
              idx++;
              continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
          nblk++;
          todo |= process_bb (avail, bb,
                              rpo_state[idx].visited != 0,
                              rpo_state[idx].iterate,
                              iterate, eliminate, do_region, exit_bbs);
          rpo_state[idx].visited++;
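          /* Note process_bb is told whether the block was visited
             before, so PHI visits can detect changed values, and
             whether backedge PHI arguments may be used optimistically
             rather than forced varying.  */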
          /* Verify if changed values flow over executable outgoing backedges
             and those change destination PHI values (that's the thing we
             can easily verify).  Reduce over all such edges to the farthest
             away destination.  */
          int iterate_to = -1;
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
                == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
                && rpo_state[bb_to_rpo[e->dest->index]].iterate)
              {
                int destidx = bb_to_rpo[e->dest->index];
                if (!rpo_state[destidx].visited)
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Unvisited destination %d\n",
                               e->dest->index);
                    if (iterate_to == -1 || destidx < iterate_to)
                      iterate_to = destidx;
                    continue;
                  }
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Looking for changed values of backedge"
                           " %d->%d destination PHIs\n",
                           e->src->index, e->dest->index);
                vn_context_bb = e->dest;
                gphi_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi); gsi_next (&gsi))
                  {
                    bool inserted = false;
                    /* While we'd ideally just iterate on value changes
                       we CSE PHIs and do that even across basic-block
                       boundaries.  So even hashtable state changes can
                       be important (which is roughly equivalent to
                       PHI argument value changes).  To not excessively
                       iterate because of that we track whether a PHI
                       was CSEd to with GF_PLF_1.  */
                    bool phival_changed;
                    if ((phival_changed = visit_phi (gsi.phi (),
                                                     &inserted, false))
                        || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                      {
                        if (!phival_changed
                            && dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "PHI was CSEd and hashtable "
                                   "state (changed)\n");
                        if (iterate_to == -1 || destidx < iterate_to)
                          iterate_to = destidx;
                        break;
                      }
                  }
                vn_context_bb = NULL;
              }
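          /* If any destination PHI changed, unwind and restart at the
             farthest back (smallest RPO index) such destination.  */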
          if (iterate_to != -1)
            {
              do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
              idx = iterate_to;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Iterating to %d BB%d\n",
                         iterate_to, rpo[iterate_to]);
              continue;
            }

          idx++;
        }
      while (idx < n);
    }
  else
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_first_set_bit (worklist);
          bitmap_clear_bit (worklist, idx);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

          /* When we run into predecessor edges where we cannot trust their
             executable state mark them executable so PHI processing will
             be conservative.
             ??? Do we need to force arguments flowing over that edge
             to be varying or will they even always be?  */
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && !rpo_state[bb_to_rpo[e->src->index]].visited
                && rpo_state[bb_to_rpo[e->src->index]].max_rpo >= (int)idx)
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs);
          rpo_state[idx].visited++;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }
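  /* Note the worklist is keyed by RPO index and bitmap_first_set_bit
     returns the smallest set bit, so even the greedy mode still
     visits reachable blocks in RPO order.  */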
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      nvalues++;
      navail += (*i).second.length ();
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }
  /* When !iterate we already performed elimination during the RPO
     walk.  */
  if (iterate && eliminate)
    {
      /* Elimination for region-based VN needs to be done within the
         RPO walk.  */
      gcc_assert (! do_region);
      /* Note we can't use avail.walk here because that gets confused
         by the existing availability and it will be less efficient
         as well.  */
      todo |= eliminate_with_rpo_vn (NULL);
    }
  else
    todo |= avail.eliminate_cleanup (do_region);

  vn_valueize = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
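/* Note this overload runs the non-iterating mode (ITERATE false) with
   elimination enabled; region-based iteration plus elimination is
   rejected by the assert in the worker above.  */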
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  calculate_dominance_info (CDI_DOMINATORS);
  if (optimize > 1)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
  free_rpo_vn ();

  if (optimize > 1)
    loop_optimizer_finalize ();

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE