/* SCC value numbering for trees
   Copyright (C) 2006-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
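
/* An illustrative sketch, not from the original sources: for the classic
   Cooper/Simpson example

       i_1 = PHI <0(entry), i_2(latch)>
       j_3 = PHI <0(entry), j_4(latch)>
       i_2 = i_1 + 1;
       j_4 = j_3 + 1;

   all four names form one SCC.  The optimistic table lets the walk
   assume the two phis are equal; iterating the SCC then shows i_2 and
   j_4 hash identically, so the assumption is confirmed and the results
   are moved to the valid table.  A pessimistic single-pass hash-based
   walk could not discover this equivalence.  */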
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
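
/* A hedged illustration, not from the original sources, of the operand
   vectors the function below produces (outermost operation first): a
   load from a.b[i_1] becomes roughly

       { ARRAY_REF <op0 = i_1, op1 = lower bound, op2 = element size>,
	 COMPONENT_REF <op0 = FIELD_DECL b>,
	 MEM_REF <op0 = 0>, ADDR_EXPR <op0 = &a> }

   where the trailing MEM_REF/ADDR_EXPR pair is the canonical MEM[&decl]
   form also produced when valueizing MEM[ptr] with ptr = &a, so
   equivalent references hash and compare equal operand by operand.  */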
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  {
	    offset_int off = mem_ref_offset (ref);
	    if (wi::fits_shwi_p (off))
	      temp.off = off.to_shwi ();
	  }
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
	    {
	      HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
	      if (off % BITS_PER_UNIT == 0)
		temp.off = off / BITS_PER_UNIT;
	    }
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  offset_int offset = 0;
  offset_int max_size;
  offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = int (GET_MODE_BITSIZE (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    size = wi::to_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
	      max_size = -1;
	    else
	      {
		offset_int woffset = (wi::to_offset (this_offset)
				      << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (TREE_CODE (op->op0) != INTEGER_CST
	      || TREE_CODE (op->op1) != INTEGER_CST
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!wi::fits_shwi_p (size) || wi::neg_p (size))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }
  ref->size = size.to_shwi ();

  if (!wi::fits_shwi_p (offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }
  ref->offset = offset.to_shwi ();

  if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
    ref->max_size = -1;
  else
    ref->max_size = max_size.to_shwi ();

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && off == 0
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
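
/* For illustration only (not from the original sources), the
   POINTER_PLUS_EXPR path above performs

       _2 = _1 + 16;   ...  MEM[_2 + 4]  ==>  MEM[_1 + 20]

   accumulating constant displacements into the MEM_REF operand so that
   equal addresses receive equal value numbers.  */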
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
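
/* A hedged example, not from the original sources: for

       static const int tab[4] = { 1, 2, 3, 4 };
       ... = tab[2];

   the operand vector has constant offsets throughout, ctor_for_folding
   yields the initializer, and the load folds to the constant 3 via
   fold_ctor_reference or the native encode/interpret pair above.  */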
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s> 
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s> 
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s> 
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s> 
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
{
  if (!rcode.is_tree_code ())
    return NULL_TREE;
  vn_nary_op_t vnresult = NULL;
  return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
				   (tree_code) rcode, type, ops, &vnresult);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) rcode))
    {
    case 1:
      res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (rcode, ops))
    /* The expression is already available.  */
    result = ops[0];
  else
    {
      tree val = vn_lookup_simplify_result (rcode, type, ops);
      if (!val)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      VN_INFO_GET (result)->valnum = result;
      VN_INFO (result)->value_id = get_next_value_id ();
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      VN_INFO (result)->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
         SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (nary->result == NULL_TREE);
	  nary->result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else if (current_info == optimistic_info)
	{
	  current_info = valid_info;
	  vn_nary_op_insert_stmt (new_stmt, result);
	  current_info = optimistic_info;
	}
      else
	vn_nary_op_insert_stmt (new_stmt, result);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result, 0);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
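
/* A hedged usage sketch, not from the original sources: asked for
   BIT_FIELD_REF <x_1, 32, 0>, the helper above first tries to simplify
   the expression (possibly to an existing SSA name or constant), then
   looks it up in the N-ary tables, and only as a last resort builds a
   new statement whose SSA name result is value-numbered to itself and
   queued for later insertion.  */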
1722 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1723 from the statement defining VUSE and if not successful tries to
1724 translate *REFP and VR_ through an aggregate copy at the definition
1725 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1726 of *REF and *VR. If only disambiguation was performed then
1727 *DISAMBIGUATE_ONLY is set to true. */
1730 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1731 bool *disambiguate_only
)
1733 vn_reference_t vr
= (vn_reference_t
)vr_
;
1734 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1735 tree base
= ao_ref_base (ref
);
1736 HOST_WIDE_INT offset
, maxsize
;
1737 static vec
<vn_reference_op_s
>
1740 bool lhs_ref_ok
= false;
1742 /* If the reference is based on a parameter that was determined as
1743 pointing to readonly memory it doesn't change. */
1744 if (TREE_CODE (base
) == MEM_REF
1745 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
1746 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0))
1747 && bitmap_bit_p (const_parms
,
1748 SSA_NAME_VERSION (TREE_OPERAND (base
, 0))))
1750 *disambiguate_only
= true;
1754 /* First try to disambiguate after value-replacing in the definitions LHS. */
1755 if (is_gimple_assign (def_stmt
))
1757 tree lhs
= gimple_assign_lhs (def_stmt
);
1758 bool valueized_anything
= false;
1759 /* Avoid re-allocation overhead. */
1760 lhs_ops
.truncate (0);
1761 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1762 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1763 if (valueized_anything
)
1765 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1766 get_alias_set (lhs
),
1767 TREE_TYPE (lhs
), lhs_ops
);
1769 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1771 *disambiguate_only
= true;
1777 ao_ref_init (&lhs_ref
, lhs
);
1781 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1782 && gimple_call_num_args (def_stmt
) <= 4)
1784 /* For builtin calls valueize its arguments and call the
1785 alias oracle again. Valueization may improve points-to
1786 info of pointers and constify size and position arguments.
1787 Originally this was motivated by PR61034 which has
1788 conditional calls to free falsely clobbering ref because
1789 of imprecise points-to info of the argument. */
1791 bool valueized_anything
= false;
1792 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1794 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1795 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1796 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1798 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1799 valueized_anything
= true;
1802 if (valueized_anything
)
1804 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1806 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1807 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1810 *disambiguate_only
= true;
1816 if (*disambiguate_only
)
1819 offset
= ref
->offset
;
1820 maxsize
= ref
->max_size
;
1822 /* If we cannot constrain the size of the reference we cannot
1823 test if anything kills it. */
1827 /* We can't deduce anything useful from clobbers. */
1828 if (gimple_clobber_p (def_stmt
))
1831 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1832 from that definition.
1834 if (is_gimple_reg_type (vr
->type
)
1835 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1836 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1837 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1838 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1840 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1842 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1844 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
1846 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1847 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1848 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1850 && operand_equal_p (base
, base2
, 0)
1851 && offset2
<= offset
1852 && offset2
+ size2
>= offset
+ maxsize
)
1854 tree val
= build_zero_cst (vr
->type
);
1855 return vn_reference_lookup_or_insert_for_pieces
1856 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1860 /* 2) Assignment from an empty CONSTRUCTOR. */
1861 else if (is_gimple_reg_type (vr
->type
)
1862 && gimple_assign_single_p (def_stmt
)
1863 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1864 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1867 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1869 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1870 &offset2
, &size2
, &maxsize2
, &reverse
);
1872 && operand_equal_p (base
, base2
, 0)
1873 && offset2
<= offset
1874 && offset2
+ size2
>= offset
+ maxsize
)
1876 tree val
= build_zero_cst (vr
->type
);
1877 return vn_reference_lookup_or_insert_for_pieces
1878 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1882 /* 3) Assignment from a constant. We can use folds native encode/interpret
1883 routines to extract the assigned bits. */
1884 else if (ref
->size
== maxsize
1885 && is_gimple_reg_type (vr
->type
)
1886 && !contains_storage_order_barrier_p (vr
->operands
)
1887 && gimple_assign_single_p (def_stmt
)
1888 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1889 && maxsize
% BITS_PER_UNIT
== 0
1890 && offset
% BITS_PER_UNIT
== 0
1891 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
1892 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
1893 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
1896 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1898 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1899 &offset2
, &size2
, &maxsize2
, &reverse
);
1902 && maxsize2
== size2
1903 && size2
% BITS_PER_UNIT
== 0
1904 && offset2
% BITS_PER_UNIT
== 0
1905 && operand_equal_p (base
, base2
, 0)
1906 && offset2
<= offset
1907 && offset2
+ size2
>= offset
+ maxsize
)
1909 /* We support up to 512-bit values (for V8DFmode). */
1910 unsigned char buffer
[64];
1913 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1914 if (TREE_CODE (rhs
) == SSA_NAME
)
1915 rhs
= SSA_VAL (rhs
);
1916 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1917 buffer
, sizeof (buffer
));
1920 tree type
= vr
->type
;
1921 /* Make sure to interpret in a type that has a range
1922 covering the whole access size. */
1923 if (INTEGRAL_TYPE_P (vr
->type
)
1924 && ref
->size
!= TYPE_PRECISION (vr
->type
))
1925 type
= build_nonstandard_integer_type (ref
->size
,
1926 TYPE_UNSIGNED (type
));
1927 tree val
= native_interpret_expr (type
,
1929 + ((offset
- offset2
)
1931 ref
->size
/ BITS_PER_UNIT
);
1932 /* If we chop off bits because the types precision doesn't
1933 match the memory access size this is ok when optimizing
1934 reads but not when called from the DSE code during
1937 && type
!= vr
->type
)
1939 if (! int_fits_type_p (val
, vr
->type
))
1942 val
= fold_convert (vr
->type
, val
);
1946 return vn_reference_lookup_or_insert_for_pieces
1947 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1952 /* 4) Assignment from an SSA name which definition we may be able
1953 to access pieces from. */
1954 else if (ref
->size
== maxsize
1955 && is_gimple_reg_type (vr
->type
)
1956 && !contains_storage_order_barrier_p (vr
->operands
)
1957 && gimple_assign_single_p (def_stmt
)
1958 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1961 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1963 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1964 &offset2
, &size2
, &maxsize2
,
1968 && maxsize2
== size2
1969 && operand_equal_p (base
, base2
, 0)
1970 && offset2
<= offset
1971 && offset2
+ size2
>= offset
+ maxsize
1972 /* ??? We can't handle bitfield precision extracts without
1973 either using an alternate type for the BIT_FIELD_REF and
1974 then doing a conversion or possibly adjusting the offset
1975 according to endianess. */
1976 && (! INTEGRAL_TYPE_P (vr
->type
)
1977 || ref
->size
== TYPE_PRECISION (vr
->type
))
1978 && ref
->size
% BITS_PER_UNIT
== 0)
1980 code_helper rcode
= BIT_FIELD_REF
;
1982 ops
[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt
));
1983 ops
[1] = bitsize_int (ref
->size
);
1984 ops
[2] = bitsize_int (offset
- offset2
);
1985 tree val
= vn_nary_build_or_lookup (rcode
, vr
->type
, ops
);
1988 vn_reference_t res
= vn_reference_lookup_or_insert_for_pieces
1989 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1995 /* 5) For aggregate copies translate the reference through them if
1996 the copy kills ref. */
1997 else if (vn_walk_kind
== VN_WALKREWRITE
1998 && gimple_assign_single_p (def_stmt
)
1999 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
2000 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
2001 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
2004 HOST_WIDE_INT maxsize2
;
2006 auto_vec
<vn_reference_op_s
> rhs
;
2007 vn_reference_op_t vro
;
2013 /* See if the assignment kills REF. */
2014 base2
= ao_ref_base (&lhs_ref
);
2015 maxsize2
= lhs_ref
.max_size
;
2018 && (TREE_CODE (base
) != MEM_REF
2019 || TREE_CODE (base2
) != MEM_REF
2020 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
2021 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
2022 TREE_OPERAND (base2
, 1))))
2023 || !stmt_kills_ref_p (def_stmt
, ref
))
2026 /* Find the common base of ref and the lhs. lhs_ops already
2027 contains valueized operands for the lhs. */
2028 i
= vr
->operands
.length () - 1;
2029 j
= lhs_ops
.length () - 1;
2030 while (j
>= 0 && i
>= 0
2031 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
2037 /* ??? The innermost op should always be a MEM_REF and we already
2038 checked that the assignment to the lhs kills vr. Thus for
2039 aggregate copies using char[] types the vn_reference_op_eq
2040 may fail when comparing types for compatibility. But we really
2041 don't care here - further lookups with the rewritten operands
2042 will simply fail if we messed up types too badly. */
2043 HOST_WIDE_INT extra_off
= 0;
2044 if (j
== 0 && i
>= 0
2045 && lhs_ops
[0].opcode
== MEM_REF
2046 && lhs_ops
[0].off
!= -1)
2048 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
2050 else if (vr
->operands
[i
].opcode
== MEM_REF
2051 && vr
->operands
[i
].off
!= -1)
2053 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
2058 /* i now points to the first additional op.
2059 ??? LHS may not be completely contained in VR, one or more
2060 VIEW_CONVERT_EXPRs could be in its way. We could at least
2061 try handling outermost VIEW_CONVERT_EXPRs. */
2065 /* Punt if the additional ops contain a storage order barrier. */
2066 for (k
= i
; k
>= 0; k
--)
2068 vro
= &vr
->operands
[k
];
2069 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
2073 /* Now re-write REF to be based on the rhs of the assignment. */
2074 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
2076 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2079 if (rhs
.length () < 2
2080 || rhs
[0].opcode
!= MEM_REF
2081 || rhs
[0].off
== -1)
2083 rhs
[0].off
+= extra_off
;
2084 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
2085 build_int_cst (TREE_TYPE (rhs
[0].op0
),
2089 /* We need to pre-pend vr->operands[0..i] to rhs. */
2090 vec
<vn_reference_op_s
> old
= vr
->operands
;
2091 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
2092 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
2094 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
2095 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
2096 vr
->operands
[i
+ 1 + j
] = *vro
;
2097 vr
->operands
= valueize_refs (vr
->operands
);
2098 if (old
== shared_lookup_references
)
2099 shared_lookup_references
= vr
->operands
;
2100 vr
->hashcode
= vn_reference_compute_hash (vr
);
2102 /* Try folding the new reference to a constant. */
2103 tree val
= fully_constant_vn_reference_p (vr
);
2105 return vn_reference_lookup_or_insert_for_pieces
2106 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2108 /* Adjust *ref from the new operands. */
2109 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2111 /* This can happen with bitfields. */
2112 if (ref
->size
!= r
.size
)
2116 /* Do not update last seen VUSE after translating. */
2117 last_vuse_ptr
= NULL
;
2119 /* Keep looking for the adjusted *REF / VR pair. */
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset, at;
      vn_reference_op_s op;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = SSA_VAL (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = SSA_VAL (lhs);
	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += tree_to_uhwi (TREE_OPERAND (base, 1));
      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (lhs_offset >= at + maxsize / BITS_PER_UNIT
	  || lhs_offset + copy_size <= at)
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (at < lhs_offset
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
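/* For illustration (a hypothetical example, not taken from a testcase):
   given

     memcpy (&a, &b, sizeof (a));
     ... = a.x;

   the memcpy clause above rewrites the lookup of a.x into a lookup of
   the corresponding bytes of b, expressed as a MEM_REF based on the
   memcpy source, so the load can be value-numbered to whatever b.x
   contained before the copy if that value is found while walking
   further.  */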
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
     return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      if (! tbaa_p)
	r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);
  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = current_info->references_pool->allocate ();
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = current_info->references_pool->allocate ();
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (vp1->phiargs.length () > 2
			? vp1->block->index : vp1->phiargs.length ());
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, as they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);
  tree lhs1 = gimple_cond_lhs (cond1);
  tree lhs2 = gimple_cond_lhs (cond2);
  tree rhs1 = gimple_cond_rhs (cond1);
  tree rhs2 = gimple_cond_rhs (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  lhs1 = vn_valueize (lhs1);
  rhs1 = vn_valueize (rhs1);
  lhs2 = vn_valueize (lhs2);
  rhs2 = vn_valueize (rhs2);
  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (vp1->phiargs.length () != vp2->phiargs.length ())
	return false;

      switch (vp1->phiargs.length ())
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gimple *last1 = last_stmt (idom1);
	    gimple *last2 = last_stmt (idom2);
	    if (gimple_code (last1) != GIMPLE_COND
		|| gimple_code (last2) != GIMPLE_COND)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (as_a <gcond *> (last1),
				      as_a <gcond *> (last2), &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned i;
  tree phi1op;
  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
static vec<tree> shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi)
{
  vn_phi_s **slot;
  struct vn_phi_s vp1;
  edge e;
  edge_iterator ei;

  shared_lookup_phiargs.truncate (0);
  shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs[e->dest_idx] = def;
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = current_info->phis_pool->allocate ();
  vec<tree> args = vNULL;
  edge e;
  edge_iterator ei;

  args.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  unsigned int i;
  tree var;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (! prede)
	return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
	if (succe)
	  {
	    succe = NULL;
	    break;
	  }
	succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare us the dominator
	 check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
	{
	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
	    if (e != succe
		&& (e->flags & EDGE_EXECUTABLE))
	      {
		succe = NULL;
		break;
	      }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
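/* For illustration: if bb2's only executable successor edge leads to a
   block that in turn is bb1's only executable predecessor, then bb2
   effectively dominates bb1 for value-numbering purposes even when the
   plain dominator tree, which still counts the non-executable edges,
   says it does not.  */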
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_assert (to != NULL_TREE
	      && ((TREE_CODE (to) == SSA_NAME
		   && (to == from || SSA_VAL (to) == to))
		  || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from, 0);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to, 0);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && coff == toff))
    {
      /* If we equate two SSA names we have to make the side-band info
	 of the leader conservative (and remember whatever original value
	 was present).  */
      if (TREE_CODE (to) == SSA_NAME)
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
	      && SSA_NAME_RANGE_INFO (to))
	    {
	      if (SSA_NAME_IS_DEFAULT_DEF (to)
		  || dominated_by_p_w_unex
			(gimple_bb (SSA_NAME_DEF_STMT (from)),
			 gimple_bb (SSA_NAME_DEF_STMT (to))))
		/* Keep the info from the dominator.  */
		;
	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
		       || dominated_by_p_w_unex
			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
			     gimple_bb (SSA_NAME_DEF_STMT (from))))
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.range_info)
		    {
		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
		      VN_INFO (to)->range_info_anti_range_p
			= SSA_NAME_ANTI_RANGE_P (to);
		    }
		  /* Use that from the dominator.  */
		  SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
		  SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
		}
	      else
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.range_info)
		    {
		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
		      VN_INFO (to)->range_info_anti_range_p
			= SSA_NAME_ANTI_RANGE_P (to);
		    }
		  /* Rather than allocating memory and unioning the info
		     just clear it.  */
		  SSA_NAME_RANGE_INFO (to) = NULL;
		}
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (to))
		   && SSA_NAME_PTR_INFO (to))
	    {
	      if (SSA_NAME_IS_DEFAULT_DEF (to)
		  || dominated_by_p_w_unex
			(gimple_bb (SSA_NAME_DEF_STMT (from)),
			 gimple_bb (SSA_NAME_DEF_STMT (to))))
		/* Keep the info from the dominator.  */
		;
	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
		       || dominated_by_p_w_unex
			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
			     gimple_bb (SSA_NAME_DEF_STMT (from))))
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.ptr_info)
		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
		  /* Use that from the dominator.  */
		  SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
		}
	      else if (! SSA_NAME_PTR_INFO (from)
		       /* Handle the case of trivially equivalent info.  */
		       || memcmp (SSA_NAME_PTR_INFO (to),
				  SSA_NAME_PTR_INFO (from),
				  sizeof (ptr_info_def)) != 0)
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.ptr_info)
		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
		  /* Rather than allocating memory and unioning the info
		     just clear it.  */
		  SSA_NAME_PTR_INFO (to) = NULL;
		}
	    }
	}

      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple *stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							    INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
    }

  if ((!result || !resultsame)
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      && default_vn_walk_kind == VN_WALK)
    {
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
      if (vnresult)
	{
	  VN_INFO (vdef)->use_processed = true;
	  return set_ssa_val_to (vdef, vnresult->result_vdef);
	}
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple *phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	++n_executable;
	if (TREE_CODE (def) == SSA_NAME)
	  def = SSA_VAL (def);
	if (def == VN_TOP)
	  continue;
	if (sameval == VN_TOP)
	  sameval = def;
	else if (!expressions_equal_p (def, sameval))
	  {
	    allsame = false;
	    break;
	  }
      }

  /* If none of the edges was executable or all incoming values are
     undefined keep the value-number at VN_TOP.  If only a single edge
     is executable use its value.  */
  if (sameval == VN_TOP
      || n_executable == 1)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  result = vn_phi_lookup (phi);
  if (result)
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  /* Otherwise all value numbered to the same value, the phi node has that
     value.  */
  else if (allsame)
    changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      if (!gimple_call_internal_p (call_stmt)
	  && (/* Calls to the same function with the same vuse
		 and the same operands do not necessarily return the same
		 value, unless they're pure or const.  */
	      gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if 2 calls with vdef have the
		 same vuse, we know they're not subsequent.
		 We can value number 2 calls to the same function with the
		 same vuse and the same operands which are not subsequent
		 the same, because there is no code in the program that can
		 compare the 2 values...  */
	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In which case the
		     information that the values are distinct are encoded
		     in the IL.  */
		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		  /* Only perform the following when being called from PRE
		     which embeds tail merging.  */
		  && default_vn_walk_kind == VN_WALK)))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}
/* Compare two operands by reverse postorder index */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple *opstmta = SSA_NAME_DEF_STMT (opa);
  gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
	gcc_assert (!VN_INFO (var)->needs_insertion
		    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    }
  while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple *defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}
static void
init_scc_vn (void)
{
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  current_info = valid_info;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  size_t i;
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (!name)
	continue;

      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      if (!SSA_NAME_IS_DEFAULT_DEF (name))
	continue;

      switch (TREE_CODE (SSA_NAME_VAR (name)))
	{
	case VAR_DECL:
	  /* Undefined vars keep TOP.  */
	  break;

	case PARM_DECL:
	  /* Parameters are VARYING but we can record a condition
	     if we know it is a non-NULL pointer.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && nonnull_arg_p (SSA_NAME_VAR (name)))
	    {
	      tree ops[2];
	      ops[0] = name;
	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
					boolean_true_node, 0);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Recording ");
		  print_generic_expr (dump_file, name, TDF_SLIM);
		  fprintf (dump_file, " != 0\n");
		}
	    }
	  break;

	case RESULT_DECL:
	  /* If the result is passed by invisible reference the default
	     def is initialized, otherwise it's uninitialized.  */
	  if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
	    {
	      VN_INFO (name)->visited = true;
	      VN_INFO (name)->valnum = name;
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Restore SSA info that has been reset on value leaders.  */

void
scc_vn_restore_ssa_info (void)
{
  for (unsigned i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && has_VN_INFO (name))
	{
	  if (VN_INFO (name)->needs_insertion)
	    ;
	  else if (POINTER_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.ptr_info)
	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.range_info)
	    {
	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
	      SSA_NAME_ANTI_RANGE_P (name)
		= VN_INFO (name)->range_info_anti_range_p;
	    }
	}
    }
}
void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && has_VN_INFO (name)
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);

  BITMAP_FREE (const_parms);
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
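/* The ids assigned above give every expression in the valid tables
   the id of its result's value, so clients such as PRE can treat
   expressions with equal value ids as computing the same value and
   use the ids as compact bitmap-set indices.  */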
class sccvn_dom_walker : public dom_walker
{
public:
  sccvn_dom_walker ()
    : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
  ~sccvn_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
		    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
		     enum tree_code code, tree lhs, tree rhs, bool value);

  bool fail;
  vec<std::pair<basic_block, std::pair<vn_nary_op_t, vn_nary_op_t> > >
    cond_stack;
};

sccvn_dom_walker::~sccvn_dom_walker ()
{
  cond_stack.release ();
}
/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
			       enum tree_code code, tree lhs, tree rhs,
			       bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
				value
				? boolean_true_node
				: boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
	       value ? "true" : "false",
	       old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}
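/* Each cond_stack entry pairs the block that made the condition
   active with the inserted expression and the (possibly NULL) entry
   it displaced; after_dom_children below uses exactly this pair to
   undo the insertion and restore the displaced entry.  */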
/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
				enum tree_code code, tree lhs, tree rhs,
				bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}
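/* For example, on the true edge of 'if (a_1 < b_2)' this records
   a_1 < b_2 == true and, in addition, the dominated conditions
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false,
   so lookups of any of the weaker forms in dominated blocks fold
   without further reasoning.  */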
/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
	 && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
	vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}
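/* Because the dominator walk enters and leaves dominated regions in a
   strictly nested fashion, popping the entries recorded for BB in
   LIFO order restores the nary hash table to the exact state it had
   before before_dom_children recorded conditions for BB.  */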
/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = NULL;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* Ignore simple backedges from this block to allow recording
	 conditions in loop headers.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;
      if (! (e->flags & EDGE_EXECUTABLE))
	continue;
      if (pred_e)
	{
	  pred_e = NULL;
	  break;
	}
      pred_e = e;
    }
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
	  && !DFS (res))
	{
	  fail = true;
	  return NULL;
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited
	    && !DFS (op))
	  {
	    fail = true;
	    return NULL;
	  }
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val
	    || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
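/* For instance, if the block ends in 'if (x_3 == 0)' and x_3 has been
   value-numbered to the constant 0, find_taken_edge returns the true
   edge; returning it lets the dominator walker mark the remaining
   outgoing edges as not executable and skip the dead region.  */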
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      unsigned i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }
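  /* The "fn spec" attribute is an internal string attribute attached
     by frontends, e.g. for Fortran library functions: character 0
     describes the return value and character I the I-th parameter,
     which is why the loop above starts at index 1.  'R' and 'r' state
     that memory reachable from the parameter is only read by the
     function, which the alias oracle walk can exploit when looking
     for redundant loads.  */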
  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (!info->visited)
	info->valnum = name;
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids from the value leaders.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
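/* The OEP_PURE_SAME flag is intended to let operand_equal_p consider
   two calls to the same pure function with equal arguments equal,
   matching the value-numbering view that such calls compute the same
   value.  */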
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
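/* For example, a TRUNC_DIV_EXPR whose second operand is not a
   known-nonzero constant may trap at runtime; clients such as PRE
   consult this predicate so they do not insert such an expression on
   a path where the original program would not have evaluated it.  */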