/* SCC value numbering for trees
   Copyright (C) 2006-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
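/* For illustration, consider a hypothetical input loop such as

       i = 0; j = 0;
       while (i < n)
         {
           i = i + 1;
           j = j + 1;
         }

   The loop phis and increments of i and j form a single SSA cycle.  A
   plain hash-based RPO pass cannot prove i == j because each phi depends
   on a not-yet-numbered increment; the optimistic SCC iteration assumes
   the phi results equal, verifies that assumption on the next pass, and
   ends up giving i and j (and their increments) the same value number.  */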
static tree *last_vuse_ptr;

static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    x = SSA_VAL (x);
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
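/* For illustration (hypothetical virtual operands): if elimination removes
   a store and releases its VDEF .MEM_7 after value-numbering it to the
   store's VUSE .MEM_5, then SSA_VAL (.MEM_7) is .MEM_5 and the loop above
   keeps following such released names until it reaches a live VUSE.  */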
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
416 /* Return the vn_kind the expression computed by the stmt should be
420 vn_get_stmt_kind (gimple
*stmt
)
422 switch (gimple_code (stmt
))
430 enum tree_code code
= gimple_assign_rhs_code (stmt
);
431 tree rhs1
= gimple_assign_rhs1 (stmt
);
432 switch (get_gimple_rhs_class (code
))
434 case GIMPLE_UNARY_RHS
:
435 case GIMPLE_BINARY_RHS
:
436 case GIMPLE_TERNARY_RHS
:
438 case GIMPLE_SINGLE_RHS
:
439 switch (TREE_CODE_CLASS (code
))
442 /* VOP-less references can go through unary case. */
443 if ((code
== REALPART_EXPR
444 || code
== IMAGPART_EXPR
445 || code
== VIEW_CONVERT_EXPR
446 || code
== BIT_FIELD_REF
)
447 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
451 case tcc_declaration
:
458 if (code
== ADDR_EXPR
)
459 return (is_gimple_min_invariant (rhs1
)
460 ? VN_CONSTANT
: VN_REFERENCE
);
461 else if (code
== CONSTRUCTOR
)
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */
478 get_constant_value_id (tree constant
)
480 vn_constant_s
**slot
;
481 struct vn_constant_s vc
;
483 vc
.hashcode
= vn_hash_constant_with_type (constant
);
484 vc
.constant
= constant
;
485 slot
= constant_to_value_id
->find_slot (&vc
, NO_INSERT
);
487 return (*slot
)->value_id
;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
495 get_or_alloc_constant_value_id (tree constant
)
497 vn_constant_s
**slot
;
498 struct vn_constant_s vc
;
501 vc
.hashcode
= vn_hash_constant_with_type (constant
);
502 vc
.constant
= constant
;
503 slot
= constant_to_value_id
->find_slot (&vc
, INSERT
);
505 return (*slot
)->value_id
;
507 vcp
= XNEW (struct vn_constant_s
);
508 vcp
->hashcode
= vc
.hashcode
;
509 vcp
->constant
= constant
;
510 vcp
->value_id
= get_next_value_id ();
512 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
513 return vcp
->value_id
;
516 /* Return true if V is a value id for a constant. */
519 value_id_constant_p (unsigned int v
)
521 return bitmap_bit_p (constant_value_ids
, v
);
524 /* Compute the hash for a reference operand VRO1. */
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, inchash::hash
&hstate
)
529 hstate
.add_int (vro1
->opcode
);
531 inchash::add_expr (vro1
->op0
, hstate
);
533 inchash::add_expr (vro1
->op1
, hstate
);
535 inchash::add_expr (vro1
->op2
, hstate
);
538 /* Compute a hash for the reference operation VR1 and return it. */
541 vn_reference_compute_hash (const vn_reference_t vr1
)
543 inchash::hash hstate
;
546 vn_reference_op_t vro
;
547 HOST_WIDE_INT off
= -1;
550 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
552 if (vro
->opcode
== MEM_REF
)
554 else if (vro
->opcode
!= ADDR_EXPR
)
566 hstate
.add_int (off
);
569 && vro
->opcode
== ADDR_EXPR
)
573 tree op
= TREE_OPERAND (vro
->op0
, 0);
574 hstate
.add_int (TREE_CODE (op
));
575 inchash::add_expr (op
, hstate
);
579 vn_reference_op_compute_hash (vro
, hstate
);
582 result
= hstate
.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
585 result
+= SSA_NAME_VERSION (vr1
->vuse
);
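/* Note the VUSE contributes additively rather than being mixed into the
   hash state; this is what lets vn_reference_lookup_2 re-target an already
   computed hashcode to a new VUSE during the alias walk by subtracting the
   old SSA_NAME_VERSION and adding the new one.  */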
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
594 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
598 /* Early out if this is not a hash collision. */
599 if (vr1
->hashcode
!= vr2
->hashcode
)
602 /* The VOP needs to be the same. */
603 if (vr1
->vuse
!= vr2
->vuse
)
606 /* If the operands are the same we are done. */
607 if (vr1
->operands
== vr2
->operands
)
610 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
613 if (INTEGRAL_TYPE_P (vr1
->type
)
614 && INTEGRAL_TYPE_P (vr2
->type
))
616 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
619 else if (INTEGRAL_TYPE_P (vr1
->type
)
620 && (TYPE_PRECISION (vr1
->type
)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
623 else if (INTEGRAL_TYPE_P (vr2
->type
)
624 && (TYPE_PRECISION (vr2
->type
)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
632 HOST_WIDE_INT off1
= 0, off2
= 0;
633 vn_reference_op_t vro1
, vro2
;
634 vn_reference_op_s tem1
, tem2
;
635 bool deref1
= false, deref2
= false;
636 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
638 if (vro1
->opcode
== MEM_REF
)
640 /* Do not look through a storage order barrier. */
641 else if (vro1
->opcode
== VIEW_CONVERT_EXPR
&& vro1
->reverse
)
647 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
649 if (vro2
->opcode
== MEM_REF
)
651 /* Do not look through a storage order barrier. */
652 else if (vro2
->opcode
== VIEW_CONVERT_EXPR
&& vro2
->reverse
)
660 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
662 memset (&tem1
, 0, sizeof (tem1
));
663 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
664 tem1
.type
= TREE_TYPE (tem1
.op0
);
665 tem1
.opcode
= TREE_CODE (tem1
.op0
);
669 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
671 memset (&tem2
, 0, sizeof (tem2
));
672 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
673 tem2
.type
= TREE_TYPE (tem2
.op0
);
674 tem2
.opcode
= TREE_CODE (tem2
.op0
);
678 if (deref1
!= deref2
)
680 if (!vn_reference_op_eq (vro1
, vro2
))
685 while (vr1
->operands
.length () != i
686 || vr2
->operands
.length () != j
);
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
695 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
697 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
699 vn_reference_op_s temp
;
703 memset (&temp
, 0, sizeof (temp
));
704 temp
.type
= TREE_TYPE (ref
);
705 temp
.opcode
= TREE_CODE (ref
);
706 temp
.op0
= TMR_INDEX (ref
);
707 temp
.op1
= TMR_STEP (ref
);
708 temp
.op2
= TMR_OFFSET (ref
);
710 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
711 temp
.base
= MR_DEPENDENCE_BASE (ref
);
712 result
->quick_push (temp
);
714 memset (&temp
, 0, sizeof (temp
));
715 temp
.type
= NULL_TREE
;
716 temp
.opcode
= ERROR_MARK
;
717 temp
.op0
= TMR_INDEX2 (ref
);
719 result
->quick_push (temp
);
721 memset (&temp
, 0, sizeof (temp
));
722 temp
.type
= NULL_TREE
;
723 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
724 temp
.op0
= TMR_BASE (ref
);
726 result
->quick_push (temp
);
730 /* For non-calls, store the information that makes up the address. */
734 vn_reference_op_s temp
;
736 memset (&temp
, 0, sizeof (temp
));
737 temp
.type
= TREE_TYPE (ref
);
738 temp
.opcode
= TREE_CODE (ref
);
744 temp
.op0
= TREE_OPERAND (ref
, 1);
747 temp
.op0
= TREE_OPERAND (ref
, 1);
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp
.op0
= TREE_OPERAND (ref
, 1);
754 offset_int off
= mem_ref_offset (ref
);
755 if (wi::fits_shwi_p (off
))
756 temp
.off
= off
.to_shwi ();
758 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
759 temp
.base
= MR_DEPENDENCE_BASE (ref
);
760 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
763 /* Record bits, position and storage order. */
764 temp
.op0
= TREE_OPERAND (ref
, 1);
765 temp
.op1
= TREE_OPERAND (ref
, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 2)))
768 HOST_WIDE_INT off
= tree_to_shwi (TREE_OPERAND (ref
, 2));
769 if (off
% BITS_PER_UNIT
== 0)
770 temp
.off
= off
/ BITS_PER_UNIT
;
772 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
775 /* The field decl is enough to unambiguously specify the field,
776 a matching type is not necessary and a mismatching type
777 is always a spurious difference. */
778 temp
.type
= NULL_TREE
;
779 temp
.op0
= TREE_OPERAND (ref
, 1);
780 temp
.op1
= TREE_OPERAND (ref
, 2);
782 tree this_offset
= component_ref_field_offset (ref
);
784 && TREE_CODE (this_offset
) == INTEGER_CST
)
786 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
787 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
790 = (wi::to_offset (this_offset
)
791 + (wi::to_offset (bit_offset
) >> LOG2_BITS_PER_UNIT
));
792 if (wi::fits_shwi_p (off
)
/* Prohibit value-numbering zero offset components
   of addresses the same before the pass folding
   __builtin_object_size had a chance to run
   (checking cfun->after_inlining does the trick here).  */
798 && (TREE_CODE (orig
) != ADDR_EXPR
800 || cfun
->after_inlining
))
801 temp
.off
= off
.to_shwi ();
806 case ARRAY_RANGE_REF
:
808 /* Record index as operand. */
809 temp
.op0
= TREE_OPERAND (ref
, 1);
810 /* Always record lower bounds and element size. */
811 temp
.op1
= array_ref_low_bound (ref
);
812 temp
.op2
= array_ref_element_size (ref
);
813 if (TREE_CODE (temp
.op0
) == INTEGER_CST
814 && TREE_CODE (temp
.op1
) == INTEGER_CST
815 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
817 offset_int off
= ((wi::to_offset (temp
.op0
)
818 - wi::to_offset (temp
.op1
))
819 * wi::to_offset (temp
.op2
));
820 if (wi::fits_shwi_p (off
))
821 temp
.off
= off
.to_shwi();
825 if (DECL_HARD_REGISTER (ref
))
834 /* Canonicalize decls to MEM[&decl] which is what we end up with
835 when valueizing MEM[ptr] with ptr = &decl. */
836 temp
.opcode
= MEM_REF
;
837 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
839 result
->safe_push (temp
);
840 temp
.opcode
= ADDR_EXPR
;
841 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
842 temp
.type
= TREE_TYPE (temp
.op0
);
856 if (is_gimple_min_invariant (ref
))
/* These are only interesting for their operands, their
   existence, and their type.  They will never be the last
   ref in the chain of references (i.e. they require an
   operand), so we don't have to put anything
   for op* as it will be handled by the iteration.  */
870 case VIEW_CONVERT_EXPR
:
872 temp
.reverse
= storage_order_barrier_p (ref
);
875 /* This is only interesting for its constant offset. */
876 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
881 result
->safe_push (temp
);
883 if (REFERENCE_CLASS_P (ref
)
884 || TREE_CODE (ref
) == MODIFY_EXPR
885 || TREE_CODE (ref
) == WITH_SIZE_EXPR
886 || (TREE_CODE (ref
) == ADDR_EXPR
887 && !is_gimple_min_invariant (ref
)))
888 ref
= TREE_OPERAND (ref
, 0);
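/* For illustration, a load from a.f with a VAR_DECL base a decomposes
   roughly into the operand vector (outermost operation first):

       { COMPONENT_REF, op0 = FIELD_DECL f }
       { MEM_REF,       op0 = (struct A *) 0 }
       { ADDR_EXPR,     op0 = &a }

   mirroring the MEM[&a].f form the decl is canonicalized to above
   (struct A is a hypothetical type used only for the example).  */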
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */
899 ao_ref_init_from_vn_reference (ao_ref
*ref
,
900 alias_set_type set
, tree type
,
901 vec
<vn_reference_op_s
> ops
)
903 vn_reference_op_t op
;
905 tree base
= NULL_TREE
;
907 offset_int offset
= 0;
909 offset_int size
= -1;
910 tree size_tree
= NULL_TREE
;
911 alias_set_type base_alias_set
= -1;
913 /* First get the final access size from just the outermost expression. */
915 if (op
->opcode
== COMPONENT_REF
)
916 size_tree
= DECL_SIZE (op
->op0
);
917 else if (op
->opcode
== BIT_FIELD_REF
)
921 machine_mode mode
= TYPE_MODE (type
);
923 size_tree
= TYPE_SIZE (type
);
925 size
= int (GET_MODE_BITSIZE (mode
));
927 if (size_tree
!= NULL_TREE
928 && TREE_CODE (size_tree
) == INTEGER_CST
)
929 size
= wi::to_offset (size_tree
);
931 /* Initially, maxsize is the same as the accessed element size.
932 In the following it will only grow (or become -1). */
935 /* Compute cumulative bit-offset for nested component-refs and array-refs,
936 and find the ultimate containing object. */
937 FOR_EACH_VEC_ELT (ops
, i
, op
)
941 /* These may be in the reference ops, but we cannot do anything
942 sensible with them here. */
944 /* Apart from ADDR_EXPR arguments to MEM_REF. */
945 if (base
!= NULL_TREE
946 && TREE_CODE (base
) == MEM_REF
948 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
950 vn_reference_op_t pop
= &ops
[i
-1];
951 base
= TREE_OPERAND (op
->op0
, 0);
958 offset
+= pop
->off
* BITS_PER_UNIT
;
966 /* Record the base objects. */
968 base_alias_set
= get_deref_alias_set (op
->op0
);
969 *op0_p
= build2 (MEM_REF
, op
->type
,
971 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
972 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
973 op0_p
= &TREE_OPERAND (*op0_p
, 0);
984 /* And now the usual component-reference style ops. */
986 offset
+= wi::to_offset (op
->op1
);
991 tree field
= op
->op0
;
992 /* We do not have a complete COMPONENT_REF tree here so we
993 cannot use component_ref_field_offset. Do the interesting
995 tree this_offset
= DECL_FIELD_OFFSET (field
);
997 if (op
->op1
|| TREE_CODE (this_offset
) != INTEGER_CST
)
1001 offset_int woffset
= (wi::to_offset (this_offset
)
1002 << LOG2_BITS_PER_UNIT
);
1003 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1009 case ARRAY_RANGE_REF
:
1011 /* We recorded the lower bound and the element size. */
1012 if (TREE_CODE (op
->op0
) != INTEGER_CST
1013 || TREE_CODE (op
->op1
) != INTEGER_CST
1014 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1019 = wi::sext (wi::to_offset (op
->op0
) - wi::to_offset (op
->op1
),
1020 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1021 woffset
*= wi::to_offset (op
->op2
);
1022 woffset
<<= LOG2_BITS_PER_UNIT
;
1034 case VIEW_CONVERT_EXPR
:
1051 if (base
== NULL_TREE
)
1054 ref
->ref
= NULL_TREE
;
1056 ref
->ref_alias_set
= set
;
1057 if (base_alias_set
!= -1)
1058 ref
->base_alias_set
= base_alias_set
;
1060 ref
->base_alias_set
= get_alias_set (base
);
1061 /* We discount volatiles from value-numbering elsewhere. */
1062 ref
->volatile_p
= false;
1064 if (!wi::fits_shwi_p (size
) || wi::neg_p (size
))
1072 ref
->size
= size
.to_shwi ();
1074 if (!wi::fits_shwi_p (offset
))
1081 ref
->offset
= offset
.to_shwi ();
1083 if (!wi::fits_shwi_p (max_size
) || wi::neg_p (max_size
))
1086 ref
->max_size
= max_size
.to_shwi ();
1091 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1092 vn_reference_op_s's. */
1095 copy_reference_ops_from_call (gcall
*call
,
1096 vec
<vn_reference_op_s
> *result
)
1098 vn_reference_op_s temp
;
1100 tree lhs
= gimple_call_lhs (call
);
/* If two calls have a different non-SSA LHS, vdef value numbers should be
   different.  By adding the LHS to the vector here, we ensure that the
   hashcode is different, guaranteeing a different value number.  */
1106 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1108 memset (&temp
, 0, sizeof (temp
));
1109 temp
.opcode
= MODIFY_EXPR
;
1110 temp
.type
= TREE_TYPE (lhs
);
1113 result
->safe_push (temp
);
1116 /* Copy the type, opcode, function, static chain and EH region, if any. */
1117 memset (&temp
, 0, sizeof (temp
));
1118 temp
.type
= gimple_call_return_type (call
);
1119 temp
.opcode
= CALL_EXPR
;
1120 temp
.op0
= gimple_call_fn (call
);
1121 temp
.op1
= gimple_call_chain (call
);
1122 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1123 temp
.op2
= size_int (lr
);
1125 if (gimple_call_with_bounds_p (call
))
1126 temp
.with_bounds
= 1;
1127 result
->safe_push (temp
);
1129 /* Copy the call arguments. As they can be references as well,
1130 just chain them together. */
1131 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1133 tree callarg
= gimple_call_arg (call
, i
);
1134 copy_reference_ops_from_ref (callarg
, result
);
1138 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1139 *I_P to point to the last element of the replacement. */
1141 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1144 unsigned int i
= *i_p
;
1145 vn_reference_op_t op
= &(*ops
)[i
];
1146 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1148 HOST_WIDE_INT addr_offset
= 0;
1150 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1151 from .foo.bar to the preceding MEM_REF offset and replace the
1152 address with &OBJ. */
1153 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1155 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1156 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1158 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1160 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1161 op
->op0
= build_fold_addr_expr (addr_base
);
1162 if (tree_fits_shwi_p (mem_op
->op0
))
1163 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
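/* For illustration (hypothetical offsets): an operand pair representing
   MEM[&s.b, 8] is rewritten above to MEM[&s, 8 + offsetof (b)], i.e. the
   component offset of .b is folded into the preceding MEM_REF offset and
   the address operand is replaced by the outermost object &s.  */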
1171 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1172 *I_P to point to the last element of the replacement. */
1174 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1177 unsigned int i
= *i_p
;
1178 vn_reference_op_t op
= &(*ops
)[i
];
1179 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1181 enum tree_code code
;
1184 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1185 if (!is_gimple_assign (def_stmt
))
1188 code
= gimple_assign_rhs_code (def_stmt
);
1189 if (code
!= ADDR_EXPR
1190 && code
!= POINTER_PLUS_EXPR
)
1193 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1195 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1196 from .foo.bar to the preceding MEM_REF offset and replace the
1197 address with &OBJ. */
1198 if (code
== ADDR_EXPR
)
1200 tree addr
, addr_base
;
1201 HOST_WIDE_INT addr_offset
;
1203 addr
= gimple_assign_rhs1 (def_stmt
);
1204 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1206 /* If that didn't work because the address isn't invariant propagate
1207 the reference tree from the address operation in case the current
1208 dereference isn't offsetted. */
1210 && *i_p
== ops
->length () - 1
/* This makes us disable this transform for PRE where the
   reference ops might also be used for code insertion which
   is invalid.  */
1215 && default_vn_walk_kind
== VN_WALKREWRITE
)
1217 auto_vec
<vn_reference_op_s
, 32> tem
;
1218 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1221 ops
->safe_splice (tem
);
1226 || TREE_CODE (addr_base
) != MEM_REF
)
1230 off
+= mem_ref_offset (addr_base
);
1231 op
->op0
= TREE_OPERAND (addr_base
, 0);
1236 ptr
= gimple_assign_rhs1 (def_stmt
);
1237 ptroff
= gimple_assign_rhs2 (def_stmt
);
1238 if (TREE_CODE (ptr
) != SSA_NAME
1239 || TREE_CODE (ptroff
) != INTEGER_CST
)
1242 off
+= wi::to_offset (ptroff
);
1246 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1247 if (tree_fits_shwi_p (mem_op
->op0
))
1248 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1251 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1252 op
->op0
= SSA_VAL (op
->op0
);
1253 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1254 op
->opcode
= TREE_CODE (op
->op0
);
1257 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1258 vn_reference_maybe_forwprop_address (ops
, i_p
);
1259 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1260 vn_reference_fold_indirect (ops
, i_p
);
1264 /* Optimize the reference REF to a constant if possible or return
1265 NULL_TREE if not. */
1268 fully_constant_vn_reference_p (vn_reference_t ref
)
1270 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1271 vn_reference_op_t op
;
1273 /* Try to simplify the translated expression if it is
1274 a call to a builtin function with at most two arguments. */
1276 if (op
->opcode
== CALL_EXPR
1277 && TREE_CODE (op
->op0
) == ADDR_EXPR
1278 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1279 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1280 && operands
.length () >= 2
1281 && operands
.length () <= 3)
1283 vn_reference_op_t arg0
, arg1
= NULL
;
1284 bool anyconst
= false;
1285 arg0
= &operands
[1];
1286 if (operands
.length () > 2)
1287 arg1
= &operands
[2];
1288 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1289 || (arg0
->opcode
== ADDR_EXPR
1290 && is_gimple_min_invariant (arg0
->op0
)))
1293 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1294 || (arg1
->opcode
== ADDR_EXPR
1295 && is_gimple_min_invariant (arg1
->op0
))))
1299 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1302 arg1
? arg1
->op0
: NULL
);
1304 && TREE_CODE (folded
) == NOP_EXPR
)
1305 folded
= TREE_OPERAND (folded
, 0);
1307 && is_gimple_min_invariant (folded
))
1312 /* Simplify reads from constants or constant initializers. */
1313 else if (BITS_PER_UNIT
== 8
1314 && is_gimple_reg_type (ref
->type
)
1315 && (!INTEGRAL_TYPE_P (ref
->type
)
1316 || TYPE_PRECISION (ref
->type
) % BITS_PER_UNIT
== 0))
1318 HOST_WIDE_INT off
= 0;
1320 if (INTEGRAL_TYPE_P (ref
->type
))
1321 size
= TYPE_PRECISION (ref
->type
);
1323 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1324 if (size
% BITS_PER_UNIT
!= 0
1325 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1327 size
/= BITS_PER_UNIT
;
1329 for (i
= 0; i
< operands
.length (); ++i
)
1331 if (operands
[i
].off
== -1)
1333 off
+= operands
[i
].off
;
1334 if (operands
[i
].opcode
== MEM_REF
)
1340 vn_reference_op_t base
= &operands
[--i
];
1341 tree ctor
= error_mark_node
;
1342 tree decl
= NULL_TREE
;
1343 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1345 else if (base
->opcode
== MEM_REF
1346 && base
[1].opcode
== ADDR_EXPR
1347 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1348 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
))
1350 decl
= TREE_OPERAND (base
[1].op0
, 0);
1351 ctor
= ctor_for_folding (decl
);
1353 if (ctor
== NULL_TREE
)
1354 return build_zero_cst (ref
->type
);
1355 else if (ctor
!= error_mark_node
)
1359 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1360 off
* BITS_PER_UNIT
,
1361 size
* BITS_PER_UNIT
, decl
);
1364 STRIP_USELESS_TYPE_CONVERSION (res
);
1365 if (is_gimple_min_invariant (res
))
1371 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1372 int len
= native_encode_expr (ctor
, buf
, size
, off
);
1374 return native_interpret_expr (ref
->type
, buf
, len
);
1382 /* Return true if OPS contain a storage order barrier. */
1385 contains_storage_order_barrier_p (vec
<vn_reference_op_s
> ops
)
1387 vn_reference_op_t op
;
1390 FOR_EACH_VEC_ELT (ops
, i
, op
)
1391 if (op
->opcode
== VIEW_CONVERT_EXPR
&& op
->reverse
)
1397 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1398 structures into their value numbers. This is done in-place, and
1399 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1400 whether any operands were valueized. */
1402 static vec
<vn_reference_op_s
>
1403 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1405 vn_reference_op_t vro
;
1408 *valueized_anything
= false;
1410 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1412 if (vro
->opcode
== SSA_NAME
1413 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1415 tree tem
= SSA_VAL (vro
->op0
);
1416 if (tem
!= vro
->op0
)
1418 *valueized_anything
= true;
1421 /* If it transforms from an SSA_NAME to a constant, update
1423 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1424 vro
->opcode
= TREE_CODE (vro
->op0
);
1426 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1428 tree tem
= SSA_VAL (vro
->op1
);
1429 if (tem
!= vro
->op1
)
1431 *valueized_anything
= true;
1435 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1437 tree tem
= SSA_VAL (vro
->op2
);
1438 if (tem
!= vro
->op2
)
1440 *valueized_anything
= true;
1444 /* If it transforms from an SSA_NAME to an address, fold with
1445 a preceding indirect reference. */
1448 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1449 && orig
[i
- 1].opcode
== MEM_REF
)
1451 if (vn_reference_fold_indirect (&orig
, &i
))
1452 *valueized_anything
= true;
1455 && vro
->opcode
== SSA_NAME
1456 && orig
[i
- 1].opcode
== MEM_REF
)
1458 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1459 *valueized_anything
= true;
1461 /* If it transforms a non-constant ARRAY_REF into a constant
1462 one, adjust the constant offset. */
1463 else if (vro
->opcode
== ARRAY_REF
1465 && TREE_CODE (vro
->op0
) == INTEGER_CST
1466 && TREE_CODE (vro
->op1
) == INTEGER_CST
1467 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1469 offset_int off
= ((wi::to_offset (vro
->op0
)
1470 - wi::to_offset (vro
->op1
))
1471 * wi::to_offset (vro
->op2
));
1472 if (wi::fits_shwi_p (off
))
1473 vro
->off
= off
.to_shwi ();
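/* For illustration (hypothetical values): if i_3 valueizes to 7 in a[i_3]
   with lower bound 0 and element size 4, the code above computes
   off = (7 - 0) * 4 = 28 and records it as the reference's constant
   byte offset.  */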
1480 static vec
<vn_reference_op_s
>
1481 valueize_refs (vec
<vn_reference_op_s
> orig
)
1484 return valueize_refs_1 (orig
, &tem
);
1487 static vec
<vn_reference_op_s
> shared_lookup_references
;
1489 /* Create a vector of vn_reference_op_s structures from REF, a
1490 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1491 this function. *VALUEIZED_ANYTHING will specify whether any
1492 operands were valueized. */
1494 static vec
<vn_reference_op_s
>
1495 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1499 shared_lookup_references
.truncate (0);
1500 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1501 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1502 valueized_anything
);
1503 return shared_lookup_references
;
1506 /* Create a vector of vn_reference_op_s structures from CALL, a
1507 call statement. The vector is shared among all callers of
1510 static vec
<vn_reference_op_s
>
1511 valueize_shared_reference_ops_from_call (gcall
*call
)
1515 shared_lookup_references
.truncate (0);
1516 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1517 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1518 return shared_lookup_references
;
1521 /* Lookup a SCCVN reference operation VR in the current hash table.
1522 Returns the resulting value number if it exists in the hash table,
1523 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1524 vn_reference_t stored in the hashtable if something is found. */
1527 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1529 vn_reference_s
**slot
;
1532 hash
= vr
->hashcode
;
1533 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1534 if (!slot
&& current_info
== optimistic_info
)
1535 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1539 *vnresult
= (vn_reference_t
)*slot
;
1540 return ((vn_reference_t
)*slot
)->result
;
1546 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1547 with the current VUSE and performs the expression lookup. */
1550 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1551 unsigned int cnt
, void *vr_
)
1553 vn_reference_t vr
= (vn_reference_t
)vr_
;
1554 vn_reference_s
**slot
;
/* This bounds the stmt walks we perform on reference lookups
   to O(1) instead of O(N) where N is the number of dominating
   stores.  */
1560 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1564 *last_vuse_ptr
= vuse
;
1566 /* Fixup vuse and hash. */
1568 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1569 vr
->vuse
= vuse_ssa_val (vuse
);
1571 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1573 hash
= vr
->hashcode
;
1574 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1575 if (!slot
&& current_info
== optimistic_info
)
1576 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
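/* For illustration: walk_non_aliased_vuses starts at the original lookup's
   VUSE and steps to dominating virtual definitions; at each step the
   callback above re-targets VR->vuse (and its additive hash contribution)
   before probing the optimistic and then the valid table, and gives up
   once PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS statements were walked.  */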
1583 /* Lookup an existing or insert a new vn_reference entry into the
1584 value table for the VUSE, SET, TYPE, OPERANDS reference which
1585 has the value VALUE which is either a constant or an SSA name. */
1587 static vn_reference_t
1588 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1591 vec
<vn_reference_op_s
,
1596 vn_reference_t result
;
1599 vr1
.operands
= operands
;
1602 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1603 if (vn_reference_lookup_1 (&vr1
, &result
))
1605 if (TREE_CODE (value
) == SSA_NAME
)
1606 value_id
= VN_INFO (value
)->value_id
;
1608 value_id
= get_or_alloc_constant_value_id (value
);
1609 return vn_reference_insert_pieces (vuse
, set
, type
,
1610 operands
.copy (), value
, value_id
);
1613 static vn_nary_op_t
vn_nary_op_insert_stmt (gimple
*stmt
, tree result
);
1615 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1618 vn_lookup_simplify_result (code_helper rcode
, tree type
, tree
*ops
)
1620 if (!rcode
.is_tree_code ())
1622 vn_nary_op_t vnresult
= NULL
;
1623 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code
) rcode
),
1624 (tree_code
) rcode
, type
, ops
, &vnresult
);
1627 /* Return a value-number for RCODE OPS... either by looking up an existing
1628 value-number for the simplified result or by inserting the operation. */
1631 vn_nary_build_or_lookup (code_helper rcode
, tree type
, tree
*ops
)
1633 tree result
= NULL_TREE
;
1634 /* We will be creating a value number for
1636 So first simplify and lookup this expression to see if it
1637 is already available. */
1638 mprts_hook
= vn_lookup_simplify_result
;
1640 switch (TREE_CODE_LENGTH ((tree_code
) rcode
))
1643 res
= gimple_resimplify1 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1646 res
= gimple_resimplify2 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1649 res
= gimple_resimplify3 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1653 gimple
*new_stmt
= NULL
;
1655 && gimple_simplified_result_is_gimple_val (rcode
, ops
))
1656 /* The expression is already available. */
1660 tree val
= vn_lookup_simplify_result (rcode
, type
, ops
);
1663 gimple_seq stmts
= NULL
;
1664 result
= maybe_push_res_to_seq (rcode
, type
, ops
, &stmts
);
1667 gcc_assert (gimple_seq_singleton_p (stmts
));
1668 new_stmt
= gimple_seq_first_stmt (stmts
);
1672 /* The expression is already available. */
1677 /* The expression is not yet available, value-number lhs to
1678 the new SSA_NAME we created. */
1679 /* Initialize value-number information properly. */
1680 VN_INFO_GET (result
)->valnum
= result
;
1681 VN_INFO (result
)->value_id
= get_next_value_id ();
1682 gimple_seq_add_stmt_without_update (&VN_INFO (result
)->expr
,
1684 VN_INFO (result
)->needs_insertion
= true;
1685 /* As all "inserted" statements are singleton SCCs, insert
1686 to the valid table. This is strictly needed to
1687 avoid re-generating new value SSA_NAMEs for the same
1688 expression during SCC iteration over and over (the
1689 optimistic table gets cleared after each iteration).
1690 We do not need to insert into the optimistic table, as
1691 lookups there will fall back to the valid table. */
1692 if (current_info
== optimistic_info
)
1694 current_info
= valid_info
;
1695 vn_nary_op_insert_stmt (new_stmt
, result
);
1696 current_info
= optimistic_info
;
1699 vn_nary_op_insert_stmt (new_stmt
, result
);
1700 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1702 fprintf (dump_file
, "Inserting name ");
1703 print_generic_expr (dump_file
, result
, 0);
1704 fprintf (dump_file
, " for expression ");
1705 print_gimple_expr (dump_file
, new_stmt
, 0, TDF_SLIM
);
1706 fprintf (dump_file
, "\n");
1712 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1713 from the statement defining VUSE and if not successful tries to
1714 translate *REFP and VR_ through an aggregate copy at the definition
1715 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1716 of *REF and *VR. If only disambiguation was performed then
1717 *DISAMBIGUATE_ONLY is set to true. */
1720 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1721 bool *disambiguate_only
)
1723 vn_reference_t vr
= (vn_reference_t
)vr_
;
1724 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1725 tree base
= ao_ref_base (ref
);
1726 HOST_WIDE_INT offset
, maxsize
;
1727 static vec
<vn_reference_op_s
>
1730 bool lhs_ref_ok
= false;
1732 /* If the reference is based on a parameter that was determined as
1733 pointing to readonly memory it doesn't change. */
1734 if (TREE_CODE (base
) == MEM_REF
1735 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
1736 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0))
1737 && bitmap_bit_p (const_parms
,
1738 SSA_NAME_VERSION (TREE_OPERAND (base
, 0))))
1740 *disambiguate_only
= true;
1744 /* First try to disambiguate after value-replacing in the definitions LHS. */
1745 if (is_gimple_assign (def_stmt
))
1747 tree lhs
= gimple_assign_lhs (def_stmt
);
1748 bool valueized_anything
= false;
1749 /* Avoid re-allocation overhead. */
1750 lhs_ops
.truncate (0);
1751 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1752 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1753 if (valueized_anything
)
1755 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1756 get_alias_set (lhs
),
1757 TREE_TYPE (lhs
), lhs_ops
);
1759 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1761 *disambiguate_only
= true;
1767 ao_ref_init (&lhs_ref
, lhs
);
1771 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1772 && gimple_call_num_args (def_stmt
) <= 4)
1774 /* For builtin calls valueize its arguments and call the
1775 alias oracle again. Valueization may improve points-to
1776 info of pointers and constify size and position arguments.
1777 Originally this was motivated by PR61034 which has
1778 conditional calls to free falsely clobbering ref because
1779 of imprecise points-to info of the argument. */
1781 bool valueized_anything
= false;
1782 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1784 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1785 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1786 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1788 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1789 valueized_anything
= true;
1792 if (valueized_anything
)
1794 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1796 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1797 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1800 *disambiguate_only
= true;
1806 if (*disambiguate_only
)
1809 offset
= ref
->offset
;
1810 maxsize
= ref
->max_size
;
1812 /* If we cannot constrain the size of the reference we cannot
1813 test if anything kills it. */
1817 /* We can't deduce anything useful from clobbers. */
1818 if (gimple_clobber_p (def_stmt
))
1821 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1822 from that definition.
1824 if (is_gimple_reg_type (vr
->type
)
1825 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1826 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1827 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1828 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1830 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1832 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1834 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
1836 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1837 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1838 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1840 && operand_equal_p (base
, base2
, 0)
1841 && offset2
<= offset
1842 && offset2
+ size2
>= offset
+ maxsize
)
1844 tree val
= build_zero_cst (vr
->type
);
1845 return vn_reference_lookup_or_insert_for_pieces
1846 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
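/* For illustration (hypothetical sizes): after

       memset (&a, 0, 16);

   a lookup of a.f at bit offset 64 with maxsize 32 satisfies
   offset2 (0) <= 64 and offset2 + size2 (128) >= 64 + 32, so the read is
   fully covered by the memset and folds to build_zero_cst (vr->type).  */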
1850 /* 2) Assignment from an empty CONSTRUCTOR. */
1851 else if (is_gimple_reg_type (vr
->type
)
1852 && gimple_assign_single_p (def_stmt
)
1853 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1854 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1857 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1859 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1860 &offset2
, &size2
, &maxsize2
, &reverse
);
1862 && operand_equal_p (base
, base2
, 0)
1863 && offset2
<= offset
1864 && offset2
+ size2
>= offset
+ maxsize
)
1866 tree val
= build_zero_cst (vr
->type
);
1867 return vn_reference_lookup_or_insert_for_pieces
1868 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1872 /* 3) Assignment from a constant. We can use folds native encode/interpret
1873 routines to extract the assigned bits. */
1874 else if (ref
->size
== maxsize
1875 && is_gimple_reg_type (vr
->type
)
1876 && !contains_storage_order_barrier_p (vr
->operands
)
1877 && gimple_assign_single_p (def_stmt
)
1878 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1879 && maxsize
% BITS_PER_UNIT
== 0
1880 && offset
% BITS_PER_UNIT
== 0
1881 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
1882 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
1883 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
1886 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1888 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1889 &offset2
, &size2
, &maxsize2
, &reverse
);
1892 && maxsize2
== size2
1893 && size2
% BITS_PER_UNIT
== 0
1894 && offset2
% BITS_PER_UNIT
== 0
1895 && operand_equal_p (base
, base2
, 0)
1896 && offset2
<= offset
1897 && offset2
+ size2
>= offset
+ maxsize
)
1899 /* We support up to 512-bit values (for V8DFmode). */
1900 unsigned char buffer
[64];
1903 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1904 if (TREE_CODE (rhs
) == SSA_NAME
)
1905 rhs
= SSA_VAL (rhs
);
1906 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1907 buffer
, sizeof (buffer
));
1910 tree val
= native_interpret_expr (vr
->type
,
1912 + ((offset
- offset2
)
1914 ref
->size
/ BITS_PER_UNIT
);
1916 return vn_reference_lookup_or_insert_for_pieces
1917 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1922 /* 4) Assignment from an SSA name which definition we may be able
1923 to access pieces from. */
1924 else if (ref
->size
== maxsize
1925 && is_gimple_reg_type (vr
->type
)
1926 && !contains_storage_order_barrier_p (vr
->operands
)
1927 && gimple_assign_single_p (def_stmt
)
1928 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1931 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1933 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1934 &offset2
, &size2
, &maxsize2
,
1938 && maxsize2
== size2
1939 && operand_equal_p (base
, base2
, 0)
1940 && offset2
<= offset
1941 && offset2
+ size2
>= offset
+ maxsize
1942 /* ??? We can't handle bitfield precision extracts without
1943 either using an alternate type for the BIT_FIELD_REF and
1944 then doing a conversion or possibly adjusting the offset
according to endianness.  */
1946 && (! INTEGRAL_TYPE_P (vr
->type
)
1947 || ref
->size
== TYPE_PRECISION (vr
->type
))
1948 && ref
->size
% BITS_PER_UNIT
== 0)
1950 code_helper rcode
= BIT_FIELD_REF
;
1952 ops
[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt
));
1953 ops
[1] = bitsize_int (ref
->size
);
1954 ops
[2] = bitsize_int (offset
- offset2
);
1955 tree val
= vn_nary_build_or_lookup (rcode
, vr
->type
, ops
);
1958 vn_reference_t res
= vn_reference_lookup_or_insert_for_pieces
1959 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
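/* For illustration (hypothetical GIMPLE): if the register-type store is

       s = v_3;

   then a later 32-bit load from the start of s is expressed above as
   BIT_FIELD_REF <v_3, 32, 0> and value-numbered through
   vn_nary_build_or_lookup.  */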
1965 /* 5) For aggregate copies translate the reference through them if
1966 the copy kills ref. */
1967 else if (vn_walk_kind
== VN_WALKREWRITE
1968 && gimple_assign_single_p (def_stmt
)
1969 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1970 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1971 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1974 HOST_WIDE_INT maxsize2
;
1976 auto_vec
<vn_reference_op_s
> rhs
;
1977 vn_reference_op_t vro
;
1983 /* See if the assignment kills REF. */
1984 base2
= ao_ref_base (&lhs_ref
);
1985 maxsize2
= lhs_ref
.max_size
;
1988 && (TREE_CODE (base
) != MEM_REF
1989 || TREE_CODE (base2
) != MEM_REF
1990 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
1991 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
1992 TREE_OPERAND (base2
, 1))))
1993 || !stmt_kills_ref_p (def_stmt
, ref
))
1996 /* Find the common base of ref and the lhs. lhs_ops already
1997 contains valueized operands for the lhs. */
1998 i
= vr
->operands
.length () - 1;
1999 j
= lhs_ops
.length () - 1;
2000 while (j
>= 0 && i
>= 0
2001 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
2007 /* ??? The innermost op should always be a MEM_REF and we already
2008 checked that the assignment to the lhs kills vr. Thus for
2009 aggregate copies using char[] types the vn_reference_op_eq
2010 may fail when comparing types for compatibility. But we really
2011 don't care here - further lookups with the rewritten operands
2012 will simply fail if we messed up types too badly. */
2013 HOST_WIDE_INT extra_off
= 0;
2014 if (j
== 0 && i
>= 0
2015 && lhs_ops
[0].opcode
== MEM_REF
2016 && lhs_ops
[0].off
!= -1)
2018 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
2020 else if (vr
->operands
[i
].opcode
== MEM_REF
2021 && vr
->operands
[i
].off
!= -1)
2023 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
2028 /* i now points to the first additional op.
2029 ??? LHS may not be completely contained in VR, one or more
2030 VIEW_CONVERT_EXPRs could be in its way. We could at least
2031 try handling outermost VIEW_CONVERT_EXPRs. */
2035 /* Punt if the additional ops contain a storage order barrier. */
2036 for (k
= i
; k
>= 0; k
--)
2038 vro
= &vr
->operands
[k
];
2039 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
2043 /* Now re-write REF to be based on the rhs of the assignment. */
2044 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
2046 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2049 if (rhs
.length () < 2
2050 || rhs
[0].opcode
!= MEM_REF
2051 || rhs
[0].off
== -1)
2053 rhs
[0].off
+= extra_off
;
2054 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
2055 build_int_cst (TREE_TYPE (rhs
[0].op0
),
2059 /* We need to pre-pend vr->operands[0..i] to rhs. */
2060 vec
<vn_reference_op_s
> old
= vr
->operands
;
2061 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
2063 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
2064 if (old
== shared_lookup_references
)
2065 shared_lookup_references
= vr
->operands
;
2068 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
2069 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
2070 vr
->operands
[i
+ 1 + j
] = *vro
;
2071 vr
->operands
= valueize_refs (vr
->operands
);
2072 if (old
== shared_lookup_references
)
2073 shared_lookup_references
= vr
->operands
;
2074 vr
->hashcode
= vn_reference_compute_hash (vr
);
2076 /* Try folding the new reference to a constant. */
2077 tree val
= fully_constant_vn_reference_p (vr
);
2079 return vn_reference_lookup_or_insert_for_pieces
2080 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2082 /* Adjust *ref from the new operands. */
2083 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2085 /* This can happen with bitfields. */
2086 if (ref
->size
!= r
.size
)
2090 /* Do not update last seen VUSE after translating. */
2091 last_vuse_ptr
= NULL
;
2093 /* Keep looking for the adjusted *REF / VR pair. */
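/* For illustration (hypothetical GIMPLE): with the aggregate copy

       *p_1 = s;

   a lookup of MEM[p_1].f is rewritten above into the operands of s.f and
   the VUSE walk continues, so the load can reuse whatever value was last
   stored to s.f.  */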
2097 /* 6) For memcpy copies translate the reference through them if
2098 the copy kills ref. */
2099 else if (vn_walk_kind
== VN_WALKREWRITE
2100 && is_gimple_reg_type (vr
->type
)
2101 /* ??? Handle BCOPY as well. */
2102 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
2103 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
2104 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
2105 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2106 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
2107 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
2108 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
2109 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
2113 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
2114 vn_reference_op_s op
;
2117 /* Only handle non-variable, addressable refs. */
2118 if (ref
->size
!= maxsize
2119 || offset
% BITS_PER_UNIT
!= 0
2120 || ref
->size
% BITS_PER_UNIT
!= 0)
2123 /* Extract a pointer base and an offset for the destination. */
2124 lhs
= gimple_call_arg (def_stmt
, 0);
2126 if (TREE_CODE (lhs
) == SSA_NAME
)
2128 lhs
= SSA_VAL (lhs
);
2129 if (TREE_CODE (lhs
) == SSA_NAME
)
2131 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
2132 if (gimple_assign_single_p (def_stmt
)
2133 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2134 lhs
= gimple_assign_rhs1 (def_stmt
);
2137 if (TREE_CODE (lhs
) == ADDR_EXPR
)
2139 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
2143 if (TREE_CODE (tem
) == MEM_REF
2144 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2146 lhs
= TREE_OPERAND (tem
, 0);
2147 if (TREE_CODE (lhs
) == SSA_NAME
)
2148 lhs
= SSA_VAL (lhs
);
2149 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2151 else if (DECL_P (tem
))
2152 lhs
= build_fold_addr_expr (tem
);
2156 if (TREE_CODE (lhs
) != SSA_NAME
2157 && TREE_CODE (lhs
) != ADDR_EXPR
)
2160 /* Extract a pointer base and an offset for the source. */
2161 rhs
= gimple_call_arg (def_stmt
, 1);
2163 if (TREE_CODE (rhs
) == SSA_NAME
)
2164 rhs
= SSA_VAL (rhs
);
2165 if (TREE_CODE (rhs
) == ADDR_EXPR
)
2167 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
2171 if (TREE_CODE (tem
) == MEM_REF
2172 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2174 rhs
= TREE_OPERAND (tem
, 0);
2175 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2177 else if (DECL_P (tem
))
2178 rhs
= build_fold_addr_expr (tem
);
2182 if (TREE_CODE (rhs
) != SSA_NAME
2183 && TREE_CODE (rhs
) != ADDR_EXPR
)
2186 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
2188 /* The bases of the destination and the references have to agree. */
2189 if ((TREE_CODE (base
) != MEM_REF
2191 || (TREE_CODE (base
) == MEM_REF
2192 && (TREE_OPERAND (base
, 0) != lhs
2193 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
2195 && (TREE_CODE (lhs
) != ADDR_EXPR
2196 || TREE_OPERAND (lhs
, 0) != base
)))
2199 at
= offset
/ BITS_PER_UNIT
;
2200 if (TREE_CODE (base
) == MEM_REF
)
2201 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
2202 /* If the access is completely outside of the memcpy destination
2203 area there is no aliasing. */
2204 if (lhs_offset
>= at
+ maxsize
/ BITS_PER_UNIT
2205 || lhs_offset
+ copy_size
<= at
)
2207 /* And the access has to be contained within the memcpy destination. */
2209 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
2212 /* Make room for 2 operands in the new reference. */
2213 if (vr
->operands
.length () < 2)
2215 vec
<vn_reference_op_s
> old
= vr
->operands
;
2216 vr
->operands
.safe_grow_cleared (2);
2217 if (old
== shared_lookup_references
2218 && vr
->operands
!= old
)
2219 shared_lookup_references
= vr
->operands
;
2222 vr
->operands
.truncate (2);
2224 /* The looked-through reference is a simple MEM_REF. */
2225 memset (&op
, 0, sizeof (op
));
2227 op
.opcode
= MEM_REF
;
2228 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2229 op
.off
= at
- lhs_offset
+ rhs_offset
;
2230 vr
->operands
[0] = op
;
2231 op
.type
= TREE_TYPE (rhs
);
2232 op
.opcode
= TREE_CODE (rhs
);
2235 vr
->operands
[1] = op
;
2236 vr
->hashcode
= vn_reference_compute_hash (vr
);
2238 /* Try folding the new reference to a constant. */
2239 tree val
= fully_constant_vn_reference_p (vr
);
2241 return vn_reference_lookup_or_insert_for_pieces
2242 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2244 /* Adjust *ref from the new operands. */
2245 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2247 /* This can happen with bitfields. */
2248 if (ref
->size
!= r
.size
)
2252 /* Do not update last seen VUSE after translating. */
2253 last_vuse_ptr
= NULL
;
2255 /* Keep looking for the adjusted *REF / VR pair. */
2259 /* Bail out and stop walking. */
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */
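/* Note: the OPERANDS passed in are first copied into the shared
   shared_lookup_references scratch vector and valueized there, so the
   caller's vector itself is left untouched; the hash computation and
   any walking of the virtual operand chain work on that copy.  */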
2269 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2270 vec
<vn_reference_op_s
> operands
,
2271 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2273 struct vn_reference_s vr1
;
2281 vr1
.vuse
= vuse_ssa_val (vuse
);
2282 shared_lookup_references
.truncate (0);
2283 shared_lookup_references
.safe_grow (operands
.length ());
2284 memcpy (shared_lookup_references
.address (),
2285 operands
.address (),
2286 sizeof (vn_reference_op_s
)
2287 * operands
.length ());
2288 vr1
.operands
= operands
= shared_lookup_references
2289 = valueize_refs (shared_lookup_references
);
2292 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2293 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2296 vn_reference_lookup_1 (&vr1
, vnresult
);
2298 && kind
!= VN_NOWALK
2302 vn_walk_kind
= kind
;
2303 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2305 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2306 vn_reference_lookup_2
,
2307 vn_reference_lookup_3
,
2308 vuse_ssa_val
, &vr1
);
2309 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2313 return (*vnresult
)->result
;
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */
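/* When KIND is not VN_NOWALK the lookup additionally walks the
   virtual operand (VUSE) chain via walk_non_aliased_vuses, using
   vn_reference_lookup_2 and vn_reference_lookup_3 to look through
   non-aliasing stores and simple aggregate copies before giving up.  */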
2326 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2327 vn_reference_t
*vnresult
, bool tbaa_p
)
2329 vec
<vn_reference_op_s
> operands
;
2330 struct vn_reference_s vr1
;
2332 bool valuezied_anything
;
2337 vr1
.vuse
= vuse_ssa_val (vuse
);
2338 vr1
.operands
= operands
2339 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2340 vr1
.type
= TREE_TYPE (op
);
2341 vr1
.set
= tbaa_p
? get_alias_set (op
) : 0;
2342 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2343 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2346 if (kind
!= VN_NOWALK
2349 vn_reference_t wvnresult
;
2351 /* Make sure to use a valueized reference if we valueized anything.
2352 Otherwise preserve the full reference for advanced TBAA. */
2353 if (!valuezied_anything
2354 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2356 ao_ref_init (&r
, op
);
2358 r
.ref_alias_set
= r
.base_alias_set
= 0;
2359 vn_walk_kind
= kind
;
2361 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2362 vn_reference_lookup_2
,
2363 vn_reference_lookup_3
,
2364 vuse_ssa_val
, &vr1
);
2365 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2369 *vnresult
= wvnresult
;
2370 return wvnresult
->result
;
2376 return vn_reference_lookup_1 (&vr1
, vnresult
);
2379 /* Lookup CALL in the current hash table and return the entry in
2380 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2383 vn_reference_lookup_call (gcall
*call
, vn_reference_t
*vnresult
,
2389 tree vuse
= gimple_vuse (call
);
2391 vr
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2392 vr
->operands
= valueize_shared_reference_ops_from_call (call
);
2393 vr
->type
= gimple_expr_type (call
);
2395 vr
->hashcode
= vn_reference_compute_hash (vr
);
2396 vn_reference_lookup_1 (vr
, vnresult
);
2399 /* Insert OP into the current hash table with a value number of
2400 RESULT, and return the resulting reference structure we created. */
2402 static vn_reference_t
2403 vn_reference_insert (tree op
, tree result
, tree vuse
, tree vdef
)
2405 vn_reference_s
**slot
;
2409 vr1
= current_info
->references_pool
->allocate ();
2410 if (TREE_CODE (result
) == SSA_NAME
)
2411 vr1
->value_id
= VN_INFO (result
)->value_id
;
2413 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
2414 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2415 vr1
->operands
= valueize_shared_reference_ops_from_ref (op
, &tem
).copy ();
2416 vr1
->type
= TREE_TYPE (op
);
2417 vr1
->set
= get_alias_set (op
);
2418 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2419 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
2420 vr1
->result_vdef
= vdef
;
2422 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
/* Because we look up stores using vuses, and value number failures
   using the vdefs (see visit_reference_op_store for how and why),
   it's possible that on failure we may try to insert an already
   inserted store.  This is not wrong, there is no ssa name for a
   store that we could use as a differentiator anyway.  Thus, unlike
   the other lookup functions, you cannot gcc_assert (!*slot)
   here.  */
2433 /* But free the old slot in case of a collision. */
2435 free_reference (*slot
);
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */
2446 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
2447 vec
<vn_reference_op_s
> operands
,
2448 tree result
, unsigned int value_id
)
2451 vn_reference_s
**slot
;
2454 vr1
= current_info
->references_pool
->allocate ();
2455 vr1
->value_id
= value_id
;
2456 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2457 vr1
->operands
= valueize_refs (operands
);
2460 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2461 if (result
&& TREE_CODE (result
) == SSA_NAME
)
2462 result
= SSA_VAL (result
);
2463 vr1
->result
= result
;
2465 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
/* At this point we should have all the things inserted that we have
   seen before, and we should never try inserting something that
   already exists.  */
2471 gcc_assert (!*slot
);
2473 free_reference (*slot
);
/* Compute and return the hash value for nary operation VNO1.  */
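/* Operands of commutative operations (and of swappable comparisons)
   are canonicalized below before hashing, so that e.g. a + b and
   b + a receive the same hash value and later compare equal in
   vn_nary_op_eq.  */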
2482 vn_nary_op_compute_hash (const vn_nary_op_t vno1
)
2484 inchash::hash hstate
;
2487 for (i
= 0; i
< vno1
->length
; ++i
)
2488 if (TREE_CODE (vno1
->op
[i
]) == SSA_NAME
)
2489 vno1
->op
[i
] = SSA_VAL (vno1
->op
[i
]);
2491 if (((vno1
->length
== 2
2492 && commutative_tree_code (vno1
->opcode
))
2493 || (vno1
->length
== 3
2494 && commutative_ternary_tree_code (vno1
->opcode
)))
2495 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1], false))
2496 std::swap (vno1
->op
[0], vno1
->op
[1]);
2497 else if (TREE_CODE_CLASS (vno1
->opcode
) == tcc_comparison
2498 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1], false))
2500 std::swap (vno1
->op
[0], vno1
->op
[1]);
2501 vno1
->opcode
= swap_tree_comparison (vno1
->opcode
);
2504 hstate
.add_int (vno1
->opcode
);
2505 for (i
= 0; i
< vno1
->length
; ++i
)
2506 inchash::add_expr (vno1
->op
[i
], hstate
);
2508 return hstate
.end ();
2511 /* Compare nary operations VNO1 and VNO2 and return true if they are
2515 vn_nary_op_eq (const_vn_nary_op_t
const vno1
, const_vn_nary_op_t
const vno2
)
2519 if (vno1
->hashcode
!= vno2
->hashcode
)
2522 if (vno1
->length
!= vno2
->length
)
2525 if (vno1
->opcode
!= vno2
->opcode
2526 || !types_compatible_p (vno1
->type
, vno2
->type
))
2529 for (i
= 0; i
< vno1
->length
; ++i
)
2530 if (!expressions_equal_p (vno1
->op
[i
], vno2
->op
[i
]))
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
2624 /* Compute the hashcode for VNO and look for it in the hash table;
2625 return the resulting value number if it exists in the hash table.
2626 Return NULL_TREE if it does not exist in the hash table or if the
2627 result field of the operation is NULL. VNRESULT will contain the
2628 vn_nary_op_t from the hashtable if it exists. */
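/* The lookup below first consults CURRENT_INFO (the optimistic table
   while an SCC is being iterated) and, on a miss there, falls back to
   the already validated VALID_INFO table.  */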
2631 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
2633 vn_nary_op_s
**slot
;
2638 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2639 slot
= current_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2641 if (!slot
&& current_info
== optimistic_info
)
2642 slot
= valid_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2648 return (*slot
)->result
;
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
2722 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2723 VNO->HASHCODE first. */
2726 vn_nary_op_insert_into (vn_nary_op_t vno
, vn_nary_op_table_type
*table
,
2729 vn_nary_op_s
**slot
;
2732 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2734 slot
= table
->find_slot_with_hash (vno
, vno
->hashcode
, INSERT
);
2735 gcc_assert (!*slot
);
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */
2746 vn_nary_op_insert_pieces (unsigned int length
, enum tree_code code
,
2747 tree type
, tree
*ops
,
2748 tree result
, unsigned int value_id
)
2750 vn_nary_op_t vno1
= alloc_vn_nary_op (length
, result
, value_id
);
2751 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2752 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */
2760 vn_nary_op_insert (tree op
, tree result
)
2762 unsigned length
= TREE_CODE_LENGTH (TREE_CODE (op
));
2765 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
2766 init_vn_nary_op_from_op (vno1
, op
);
2767 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */
2774 vn_nary_op_insert_stmt (gimple
*stmt
, tree result
)
2777 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt
),
2778 result
, VN_INFO (result
)->value_id
);
2779 init_vn_nary_op_from_stmt (vno1
, stmt
);
2780 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2783 /* Compute a hashcode for PHI operation VP1 and return it. */
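/* Arguments coming in over back edges are not hashed below; they have
   to be treated as VN_TOP so that PHIs that are optimistically equal
   within a cycle still end up in the same hash bucket while the SCC
   is iterated.  */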
2785 static inline hashval_t
2786 vn_phi_compute_hash (vn_phi_t vp1
)
2788 inchash::hash
hstate (vp1
->phiargs
.length () > 2
2789 ? vp1
->block
->index
: vp1
->phiargs
.length ());
2795 /* If all PHI arguments are constants we need to distinguish
2796 the PHI node via its type. */
2798 hstate
.merge_hash (vn_hash_type (type
));
2800 FOR_EACH_EDGE (e
, ei
, vp1
->block
->preds
)
/* Don't hash backedge values; they need to be handled as VN_TOP
   for optimistic value-numbering.  */
2804 if (e
->flags
& EDGE_DFS_BACK
)
2807 phi1op
= vp1
->phiargs
[e
->dest_idx
];
2808 if (phi1op
== VN_TOP
)
2810 inchash::add_expr (phi1op
, hstate
);
2813 return hstate
.end ();
2817 /* Return true if COND1 and COND2 represent the same condition, set
2818 *INVERTED_P if one needs to be inverted to make it the same as
2822 cond_stmts_equal_p (gcond
*cond1
, gcond
*cond2
, bool *inverted_p
)
2824 enum tree_code code1
= gimple_cond_code (cond1
);
2825 enum tree_code code2
= gimple_cond_code (cond2
);
2826 tree lhs1
= gimple_cond_lhs (cond1
);
2827 tree lhs2
= gimple_cond_lhs (cond2
);
2828 tree rhs1
= gimple_cond_rhs (cond1
);
2829 tree rhs2
= gimple_cond_rhs (cond2
);
2831 *inverted_p
= false;
2834 else if (code1
== swap_tree_comparison (code2
))
2835 std::swap (lhs2
, rhs2
);
2836 else if (code1
== invert_tree_comparison (code2
, HONOR_NANS (lhs2
)))
2838 else if (code1
== invert_tree_comparison
2839 (swap_tree_comparison (code2
), HONOR_NANS (lhs2
)))
2841 std::swap (lhs2
, rhs2
);
2847 lhs1
= vn_valueize (lhs1
);
2848 rhs1
= vn_valueize (rhs1
);
2849 lhs2
= vn_valueize (lhs2
);
2850 rhs2
= vn_valueize (rhs2
);
2851 return ((expressions_equal_p (lhs1
, lhs2
)
2852 && expressions_equal_p (rhs1
, rhs2
))
2853 || (commutative_tree_code (code1
)
2854 && expressions_equal_p (lhs1
, rhs2
)
2855 && expressions_equal_p (rhs1
, lhs2
)));
2858 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
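/* For PHIs with two arguments in different blocks, equality is also
   attempted by comparing the controlling GIMPLE_COND of the respective
   immediate dominators and matching the arguments coming in over the
   true/false controlled edges, possibly with one condition inverted.  */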
2861 vn_phi_eq (const_vn_phi_t
const vp1
, const_vn_phi_t
const vp2
)
2863 if (vp1
->hashcode
!= vp2
->hashcode
)
2866 if (vp1
->block
!= vp2
->block
)
2868 if (vp1
->phiargs
.length () != vp2
->phiargs
.length ())
2871 switch (vp1
->phiargs
.length ())
2874 /* Single-arg PHIs are just copies. */
2879 /* Rule out backedges into the PHI. */
2880 if (vp1
->block
->loop_father
->header
== vp1
->block
2881 || vp2
->block
->loop_father
->header
== vp2
->block
)
2884 /* If the PHI nodes do not have compatible types
2885 they are not the same. */
2886 if (!types_compatible_p (vp1
->type
, vp2
->type
))
2890 = get_immediate_dominator (CDI_DOMINATORS
, vp1
->block
);
2892 = get_immediate_dominator (CDI_DOMINATORS
, vp2
->block
);
/* If the immediate dominators end in switch stmts, multiple
   values may end up in the same PHI arg via intermediate
   CFG merges.  */
2896 if (EDGE_COUNT (idom1
->succs
) != 2
2897 || EDGE_COUNT (idom2
->succs
) != 2)
2900 /* Verify the controlling stmt is the same. */
2901 gimple
*last1
= last_stmt (idom1
);
2902 gimple
*last2
= last_stmt (idom2
);
2903 if (gimple_code (last1
) != GIMPLE_COND
2904 || gimple_code (last2
) != GIMPLE_COND
)
2907 if (! cond_stmts_equal_p (as_a
<gcond
*> (last1
),
2908 as_a
<gcond
*> (last2
), &inverted_p
))
2911 /* Get at true/false controlled edges into the PHI. */
2912 edge te1
, te2
, fe1
, fe2
;
2913 if (! extract_true_false_controlled_edges (idom1
, vp1
->block
,
2915 || ! extract_true_false_controlled_edges (idom2
, vp2
->block
,
/* Swap edges if the second condition is the inverse of the first.  */
2922 std::swap (te2
, fe2
);
2924 /* ??? Handle VN_TOP specially. */
2925 if (! expressions_equal_p (vp1
->phiargs
[te1
->dest_idx
],
2926 vp2
->phiargs
[te2
->dest_idx
])
2927 || ! expressions_equal_p (vp1
->phiargs
[fe1
->dest_idx
],
2928 vp2
->phiargs
[fe2
->dest_idx
]))
2939 /* If the PHI nodes do not have compatible types
2940 they are not the same. */
2941 if (!types_compatible_p (vp1
->type
, vp2
->type
))
/* Any phi in the same block will have its arguments in the
   same edge order, because of how we store phi nodes.  */
2948 FOR_EACH_VEC_ELT (vp1
->phiargs
, i
, phi1op
)
2950 tree phi2op
= vp2
->phiargs
[i
];
2951 if (phi1op
== VN_TOP
|| phi2op
== VN_TOP
)
2953 if (!expressions_equal_p (phi1op
, phi2op
))
2960 static vec
<tree
> shared_lookup_phiargs
;
2962 /* Lookup PHI in the current hash table, and return the resulting
2963 value number if it exists in the hash table. Return NULL_TREE if
2964 it does not exist in the hash table. */
2967 vn_phi_lookup (gimple
*phi
)
2970 struct vn_phi_s vp1
;
2974 shared_lookup_phiargs
.truncate (0);
2975 shared_lookup_phiargs
.safe_grow (gimple_phi_num_args (phi
));
2977 /* Canonicalize the SSA_NAME's to their value number. */
2978 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
2980 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
2981 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2982 shared_lookup_phiargs
[e
->dest_idx
] = def
;
2984 vp1
.type
= TREE_TYPE (gimple_phi_result (phi
));
2985 vp1
.phiargs
= shared_lookup_phiargs
;
2986 vp1
.block
= gimple_bb (phi
);
2987 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2988 slot
= current_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2990 if (!slot
&& current_info
== optimistic_info
)
2991 slot
= valid_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2995 return (*slot
)->result
;
2998 /* Insert PHI into the current hash table with a value number of
3002 vn_phi_insert (gimple
*phi
, tree result
)
3005 vn_phi_t vp1
= current_info
->phis_pool
->allocate ();
3006 vec
<tree
> args
= vNULL
;
3010 args
.safe_grow (gimple_phi_num_args (phi
));
3012 /* Canonicalize the SSA_NAME's to their value number. */
3013 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3015 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3016 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
3017 args
[e
->dest_idx
] = def
;
3019 vp1
->value_id
= VN_INFO (result
)->value_id
;
3020 vp1
->type
= TREE_TYPE (gimple_phi_result (phi
));
3021 vp1
->phiargs
= args
;
3022 vp1
->block
= gimple_bb (phi
);
3023 vp1
->result
= result
;
3024 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
3026 slot
= current_info
->phis
->find_slot_with_hash (vp1
, vp1
->hashcode
, INSERT
);
/* Because we iterate over phi operations more than once, it's
   possible the slot might already exist here, hence no assert.  */
3035 /* Print set of components in strongly connected component SCC to OUT. */
3038 print_scc (FILE *out
, vec
<tree
> scc
)
3043 fprintf (out
, "SCC consists of:");
3044 FOR_EACH_VEC_ELT (scc
, i
, var
)
3047 print_generic_expr (out
, var
, 0);
3049 fprintf (out
, "\n");
3052 /* Return true if BB1 is dominated by BB2 taking into account edges
3053 that are not executable. */
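/* This is a cheap approximation: if the plain dominance query fails it
   follows at most one step of single executable predecessor edges from
   BB1 and single executable successor edges from BB2 and retries the
   dominance check, rather than iterating to a fixed point.  */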
3056 dominated_by_p_w_unex (basic_block bb1
, basic_block bb2
)
3061 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
/* Before iterating we'd like to know if there exists an
   (executable) path from bb2 to bb1 at all; if not we can
   directly return false.  For now simply iterate once.  */
3068 /* Iterate to the single executable bb1 predecessor. */
3069 if (EDGE_COUNT (bb1
->preds
) > 1)
3072 FOR_EACH_EDGE (e
, ei
, bb1
->preds
)
3073 if (e
->flags
& EDGE_EXECUTABLE
)
3086 /* Re-do the dominance check with changed bb1. */
3087 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
3092 /* Iterate to the single executable bb2 successor. */
3094 FOR_EACH_EDGE (e
, ei
, bb2
->succs
)
3095 if (e
->flags
& EDGE_EXECUTABLE
)
3106 /* Verify the reached block is only reached through succe.
3107 If there is only one edge we can spare us the dominator
3108 check and iterate directly. */
3109 if (EDGE_COUNT (succe
->dest
->preds
) > 1)
3111 FOR_EACH_EDGE (e
, ei
, succe
->dest
->preds
)
3113 && (e
->flags
& EDGE_EXECUTABLE
))
3123 /* Re-do the dominance check with changed bb2. */
3124 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
3129 /* We could now iterate updating bb1 / bb2. */
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */
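/* When two SSA names are unified the side-band info of the leader TO
   (range info for integral types, points-to info for pointers) is made
   conservative below: it is kept, replaced by the dominator's info, or
   dropped, and the original is remembered in VN_INFO (to)->info so
   scc_vn_restore_ssa_info can restore it afterwards.  */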
3137 set_ssa_val_to (tree from
, tree to
)
3139 tree currval
= SSA_VAL (from
);
3140 HOST_WIDE_INT toff
, coff
;
3142 /* The only thing we allow as value numbers are ssa_names
3143 and invariants. So assert that here. We don't allow VN_TOP
3144 as visiting a stmt should produce a value-number other than
3146 ??? Still VN_TOP can happen for unreachable code, so force
3147 it to varying in that case. Not all code is prepared to
3148 get VN_TOP on valueization. */
3151 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3152 fprintf (dump_file
, "Forcing value number to varying on "
3153 "receiving VN_TOP\n");
3157 gcc_assert (to
!= NULL_TREE
3158 && ((TREE_CODE (to
) == SSA_NAME
3159 && (to
== from
|| SSA_VAL (to
) == to
))
3160 || is_gimple_min_invariant (to
)));
3164 if (currval
== from
)
3166 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3168 fprintf (dump_file
, "Not changing value number of ");
3169 print_generic_expr (dump_file
, from
, 0);
3170 fprintf (dump_file
, " from VARYING to ");
3171 print_generic_expr (dump_file
, to
, 0);
3172 fprintf (dump_file
, "\n");
3176 else if (TREE_CODE (to
) == SSA_NAME
3177 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
3181 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3183 fprintf (dump_file
, "Setting value number of ");
3184 print_generic_expr (dump_file
, from
, 0);
3185 fprintf (dump_file
, " to ");
3186 print_generic_expr (dump_file
, to
, 0);
3190 && !operand_equal_p (currval
, to
, 0)
3191 /* ??? For addresses involving volatile objects or types operand_equal_p
3192 does not reliably detect ADDR_EXPRs as equal. We know we are only
3193 getting invariant gimple addresses here, so can use
3194 get_addr_base_and_unit_offset to do this comparison. */
3195 && !(TREE_CODE (currval
) == ADDR_EXPR
3196 && TREE_CODE (to
) == ADDR_EXPR
3197 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval
, 0), &coff
)
3198 == get_addr_base_and_unit_offset (TREE_OPERAND (to
, 0), &toff
))
3201 /* If we equate two SSA names we have to make the side-band info
3202 of the leader conservative (and remember whatever original value
3204 if (TREE_CODE (to
) == SSA_NAME
)
3206 if (INTEGRAL_TYPE_P (TREE_TYPE (to
))
3207 && SSA_NAME_RANGE_INFO (to
))
3209 if (SSA_NAME_IS_DEFAULT_DEF (to
)
3210 || dominated_by_p_w_unex
3211 (gimple_bb (SSA_NAME_DEF_STMT (from
)),
3212 gimple_bb (SSA_NAME_DEF_STMT (to
))))
3213 /* Keep the info from the dominator. */
3215 else if (SSA_NAME_IS_DEFAULT_DEF (from
)
3216 || dominated_by_p_w_unex
3217 (gimple_bb (SSA_NAME_DEF_STMT (to
)),
3218 gimple_bb (SSA_NAME_DEF_STMT (from
))))
3220 /* Save old info. */
3221 if (! VN_INFO (to
)->info
.range_info
)
3223 VN_INFO (to
)->info
.range_info
= SSA_NAME_RANGE_INFO (to
);
3224 VN_INFO (to
)->range_info_anti_range_p
3225 = SSA_NAME_ANTI_RANGE_P (to
);
3227 /* Use that from the dominator. */
3228 SSA_NAME_RANGE_INFO (to
) = SSA_NAME_RANGE_INFO (from
);
3229 SSA_NAME_ANTI_RANGE_P (to
) = SSA_NAME_ANTI_RANGE_P (from
);
3233 /* Save old info. */
3234 if (! VN_INFO (to
)->info
.range_info
)
3236 VN_INFO (to
)->info
.range_info
= SSA_NAME_RANGE_INFO (to
);
3237 VN_INFO (to
)->range_info_anti_range_p
3238 = SSA_NAME_ANTI_RANGE_P (to
);
3240 /* Rather than allocating memory and unioning the info
3242 SSA_NAME_RANGE_INFO (to
) = NULL
;
3245 else if (POINTER_TYPE_P (TREE_TYPE (to
))
3246 && SSA_NAME_PTR_INFO (to
))
3248 if (SSA_NAME_IS_DEFAULT_DEF (to
)
3249 || dominated_by_p_w_unex
3250 (gimple_bb (SSA_NAME_DEF_STMT (from
)),
3251 gimple_bb (SSA_NAME_DEF_STMT (to
))))
3252 /* Keep the info from the dominator. */
3254 else if (SSA_NAME_IS_DEFAULT_DEF (from
)
3255 || dominated_by_p_w_unex
3256 (gimple_bb (SSA_NAME_DEF_STMT (to
)),
3257 gimple_bb (SSA_NAME_DEF_STMT (from
))))
3259 /* Save old info. */
3260 if (! VN_INFO (to
)->info
.ptr_info
)
3261 VN_INFO (to
)->info
.ptr_info
= SSA_NAME_PTR_INFO (to
);
3262 /* Use that from the dominator. */
3263 SSA_NAME_PTR_INFO (to
) = SSA_NAME_PTR_INFO (from
);
3265 else if (! SSA_NAME_PTR_INFO (from
)
3266 /* Handle the case of trivially equivalent info. */
3267 || memcmp (SSA_NAME_PTR_INFO (to
),
3268 SSA_NAME_PTR_INFO (from
),
3269 sizeof (ptr_info_def
)) != 0)
3271 /* Save old info. */
3272 if (! VN_INFO (to
)->info
.ptr_info
)
3273 VN_INFO (to
)->info
.ptr_info
= SSA_NAME_PTR_INFO (to
);
3274 /* Rather than allocating memory and unioning the info
3276 SSA_NAME_PTR_INFO (to
) = NULL
;
3281 VN_INFO (from
)->valnum
= to
;
3282 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3283 fprintf (dump_file
, " (changed)\n");
3286 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3287 fprintf (dump_file
, "\n");
/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      VN_INFO (def)->use_processed = true;
    }
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }

  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple *stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
3365 /* Visit a call STMT storing into LHS. Return true if the value number
3366 of the LHS has changed as a result. */
3369 visit_reference_op_call (tree lhs
, gcall
*stmt
)
3371 bool changed
= false;
3372 struct vn_reference_s vr1
;
3373 vn_reference_t vnresult
= NULL
;
3374 tree vdef
= gimple_vdef (stmt
);
3376 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3377 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
3380 vn_reference_lookup_call (stmt
, &vnresult
, &vr1
);
3383 if (vnresult
->result_vdef
&& vdef
)
3384 changed
|= set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3386 if (!vnresult
->result
&& lhs
)
3387 vnresult
->result
= lhs
;
3389 if (vnresult
->result
&& lhs
)
3390 changed
|= set_ssa_val_to (lhs
, vnresult
->result
);
3395 vn_reference_s
**slot
;
3397 changed
|= set_ssa_val_to (vdef
, vdef
);
3399 changed
|= set_ssa_val_to (lhs
, lhs
);
3400 vr2
= current_info
->references_pool
->allocate ();
3401 vr2
->vuse
= vr1
.vuse
;
3402 /* As we are not walking the virtual operand chain we know the
3403 shared_lookup_references are still original so we can re-use
3405 vr2
->operands
= vr1
.operands
.copy ();
3406 vr2
->type
= vr1
.type
;
3408 vr2
->hashcode
= vr1
.hashcode
;
3410 vr2
->result_vdef
= vdef
;
3411 slot
= current_info
->references
->find_slot_with_hash (vr2
, vr2
->hashcode
,
3413 gcc_assert (!*slot
);
3420 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3421 and return true if the value number of the LHS has changed as a result. */
3424 visit_reference_op_load (tree lhs
, tree op
, gimple
*stmt
)
3426 bool changed
= false;
3430 last_vuse
= gimple_vuse (stmt
);
3431 last_vuse_ptr
= &last_vuse
;
3432 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
3433 default_vn_walk_kind
, NULL
, true);
3434 last_vuse_ptr
= NULL
;
3436 /* We handle type-punning through unions by value-numbering based
3437 on offset and size of the access. Be prepared to handle a
3438 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3440 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
3442 /* We will be setting the value number of lhs to the value number
3443 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3444 So first simplify and lookup this expression to see if it
3445 is already available. */
3446 code_helper rcode
= VIEW_CONVERT_EXPR
;
3447 tree ops
[3] = { result
};
3448 result
= vn_nary_build_or_lookup (rcode
, TREE_TYPE (op
), ops
);
3452 changed
= set_ssa_val_to (lhs
, result
);
3455 changed
= set_ssa_val_to (lhs
, lhs
);
3456 vn_reference_insert (op
, lhs
, last_vuse
, NULL_TREE
);
3463 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3464 and return true if the value number of the LHS has changed as a result. */
3467 visit_reference_op_store (tree lhs
, tree op
, gimple
*stmt
)
3469 bool changed
= false;
3470 vn_reference_t vnresult
= NULL
;
3471 tree result
, assign
;
3472 bool resultsame
= false;
3473 tree vuse
= gimple_vuse (stmt
);
3474 tree vdef
= gimple_vdef (stmt
);
3476 if (TREE_CODE (op
) == SSA_NAME
)
/* First we want to look up using the *vuses* from the store and see
   if there the last store to this location with the same address
   had the same value.

   The vuses represent the memory state before the store.  If the
   memory state, address, and value of the store is the same as the
   last store to this location, then this store will produce the
   same memory state as that store.

   In this case the vdef versions for this store are value numbered to those
   vuse versions, since they represent the same memory state after
   this store.

   Otherwise, the vdefs for the store are used when inserting into
   the table, since the store generates a new memory state.  */
3495 result
= vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, NULL
, false);
3499 if (TREE_CODE (result
) == SSA_NAME
)
3500 result
= SSA_VAL (result
);
3501 resultsame
= expressions_equal_p (result
, op
);
3504 if ((!result
|| !resultsame
)
3505 /* Only perform the following when being called from PRE
3506 which embeds tail merging. */
3507 && default_vn_walk_kind
== VN_WALK
)
3509 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3510 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
, false);
3513 VN_INFO (vdef
)->use_processed
= true;
3514 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3518 if (!result
|| !resultsame
)
3520 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3522 fprintf (dump_file
, "No store match\n");
3523 fprintf (dump_file
, "Value numbering store ");
3524 print_generic_expr (dump_file
, lhs
, 0);
3525 fprintf (dump_file
, " to ");
3526 print_generic_expr (dump_file
, op
, 0);
3527 fprintf (dump_file
, "\n");
3529 /* Have to set value numbers before insert, since insert is
3530 going to valueize the references in-place. */
3533 changed
|= set_ssa_val_to (vdef
, vdef
);
3536 /* Do not insert structure copies into the tables. */
3537 if (is_gimple_min_invariant (op
)
3538 || is_gimple_reg (op
))
3539 vn_reference_insert (lhs
, op
, vdef
, NULL
);
3541 /* Only perform the following when being called from PRE
3542 which embeds tail merging. */
3543 if (default_vn_walk_kind
== VN_WALK
)
3545 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3546 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
3551 /* We had a match, so value number the vdef to have the value
3552 number of the vuse it came from. */
3554 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3555 fprintf (dump_file
, "Store matched earlier value,"
3556 "value numbering store vdefs to matching vuses.\n");
3558 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
/* Visit and value number PHI, return true if the value number
   changed.  */
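/* Arguments on non-executable incoming edges are ignored below, and a
   match against an already visited PHI in the same block
   (vn_phi_lookup) is preferred over collapsing to a common argument
   value, as noted for PRs 66502 and 67167 further down.  */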
3568 visit_phi (gimple
*phi
)
3570 bool changed
= false;
3572 tree sameval
= VN_TOP
;
3573 bool allsame
= true;
3574 unsigned n_executable
= 0;
3576 /* TODO: We could check for this in init_sccvn, and replace this
3577 with a gcc_assert. */
3578 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
3579 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3581 /* See if all non-TOP arguments have the same value. TOP is
3582 equivalent to everything, so we can ignore it. */
3585 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3586 if (e
->flags
& EDGE_EXECUTABLE
)
3588 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3591 if (TREE_CODE (def
) == SSA_NAME
)
3592 def
= SSA_VAL (def
);
3595 if (sameval
== VN_TOP
)
3597 else if (!expressions_equal_p (def
, sameval
))
/* If none of the edges was executable or all incoming values are
   undefined keep the value-number at VN_TOP.  If only a single edge
   is executable use its value.  */
3607 if (sameval
== VN_TOP
3608 || n_executable
== 1)
3609 return set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3611 /* First see if it is equivalent to a phi node in this block. We prefer
3612 this as it allows IV elimination - see PRs 66502 and 67167. */
3613 result
= vn_phi_lookup (phi
);
3615 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
3616 /* Otherwise all value numbered to the same value, the phi node has that
3619 changed
= set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3622 vn_phi_insert (phi
, PHI_RESULT (phi
));
3623 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3629 /* Try to simplify RHS using equivalences and constant folding. */
3632 try_to_simplify (gassign
*stmt
)
3634 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3637 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3638 in this case, there is no point in doing extra work. */
3639 if (code
== SSA_NAME
)
3642 /* First try constant folding based on our current lattice. */
3643 mprts_hook
= vn_lookup_simplify_result
;
3644 tem
= gimple_fold_stmt_to_constant_1 (stmt
, vn_valueize
, vn_valueize
);
3647 && (TREE_CODE (tem
) == SSA_NAME
3648 || is_gimple_min_invariant (tem
)))
3654 /* Visit and value number USE, return true if the value number
3658 visit_use (tree use
)
3660 bool changed
= false;
3661 gimple
*stmt
= SSA_NAME_DEF_STMT (use
);
3663 mark_use_processed (use
);
3665 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3666 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3667 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3669 fprintf (dump_file
, "Value numbering ");
3670 print_generic_expr (dump_file
, use
, 0);
3671 fprintf (dump_file
, " stmt = ");
3672 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3675 /* Handle uninitialized uses. */
3676 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3677 changed
= set_ssa_val_to (use
, use
);
3678 else if (gimple_code (stmt
) == GIMPLE_PHI
)
3679 changed
= visit_phi (stmt
);
3680 else if (gimple_has_volatile_ops (stmt
))
3681 changed
= defs_to_varying (stmt
);
3682 else if (gassign
*ass
= dyn_cast
<gassign
*> (stmt
))
3684 enum tree_code code
= gimple_assign_rhs_code (ass
);
3685 tree lhs
= gimple_assign_lhs (ass
);
3686 tree rhs1
= gimple_assign_rhs1 (ass
);
3689 /* Shortcut for copies. Simplifying copies is pointless,
3690 since we copy the expression and value they represent. */
3691 if (code
== SSA_NAME
3692 && TREE_CODE (lhs
) == SSA_NAME
)
3694 changed
= visit_copy (lhs
, rhs1
);
3697 simplified
= try_to_simplify (ass
);
3700 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3702 fprintf (dump_file
, "RHS ");
3703 print_gimple_expr (dump_file
, ass
, 0, 0);
3704 fprintf (dump_file
, " simplified to ");
3705 print_generic_expr (dump_file
, simplified
, 0);
3706 fprintf (dump_file
, "\n");
3709 /* Setting value numbers to constants will occasionally
3710 screw up phi congruence because constants are not
3711 uniquely associated with a single ssa name that can be
3714 && is_gimple_min_invariant (simplified
)
3715 && TREE_CODE (lhs
) == SSA_NAME
)
3717 changed
= set_ssa_val_to (lhs
, simplified
);
3721 && TREE_CODE (simplified
) == SSA_NAME
3722 && TREE_CODE (lhs
) == SSA_NAME
)
3724 changed
= visit_copy (lhs
, simplified
);
3728 if ((TREE_CODE (lhs
) == SSA_NAME
3729 /* We can substitute SSA_NAMEs that are live over
3730 abnormal edges with their constant value. */
3731 && !(gimple_assign_copy_p (ass
)
3732 && is_gimple_min_invariant (rhs1
))
3734 && is_gimple_min_invariant (simplified
))
3735 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3736 /* Stores or copies from SSA_NAMEs that are live over
3737 abnormal edges are a problem. */
3738 || (code
== SSA_NAME
3739 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
3740 changed
= defs_to_varying (ass
);
3741 else if (REFERENCE_CLASS_P (lhs
)
3743 changed
= visit_reference_op_store (lhs
, rhs1
, ass
);
3744 else if (TREE_CODE (lhs
) == SSA_NAME
)
3746 if ((gimple_assign_copy_p (ass
)
3747 && is_gimple_min_invariant (rhs1
))
3749 && is_gimple_min_invariant (simplified
)))
3752 changed
= set_ssa_val_to (lhs
, simplified
);
3754 changed
= set_ssa_val_to (lhs
, rhs1
);
3758 /* Visit the original statement. */
3759 switch (vn_get_stmt_kind (ass
))
3762 changed
= visit_nary_op (lhs
, ass
);
3765 changed
= visit_reference_op_load (lhs
, rhs1
, ass
);
3768 changed
= defs_to_varying (ass
);
3774 changed
= defs_to_varying (ass
);
3776 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
3778 tree lhs
= gimple_call_lhs (call_stmt
);
3779 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3781 /* Try constant folding based on our current lattice. */
3782 tree simplified
= gimple_fold_stmt_to_constant_1 (call_stmt
,
3786 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3788 fprintf (dump_file
, "call ");
3789 print_gimple_expr (dump_file
, call_stmt
, 0, 0);
3790 fprintf (dump_file
, " simplified to ");
3791 print_generic_expr (dump_file
, simplified
, 0);
3792 fprintf (dump_file
, "\n");
3795 /* Setting value numbers to constants will occasionally
3796 screw up phi congruence because constants are not
3797 uniquely associated with a single ssa name that can be
3800 && is_gimple_min_invariant (simplified
))
3802 changed
= set_ssa_val_to (lhs
, simplified
);
3803 if (gimple_vdef (call_stmt
))
3804 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
3805 SSA_VAL (gimple_vuse (call_stmt
)));
3809 && TREE_CODE (simplified
) == SSA_NAME
)
3811 changed
= visit_copy (lhs
, simplified
);
3812 if (gimple_vdef (call_stmt
))
3813 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
3814 SSA_VAL (gimple_vuse (call_stmt
)));
3817 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3819 changed
= defs_to_varying (call_stmt
);
3824 if (!gimple_call_internal_p (call_stmt
)
3825 && (/* Calls to the same function with the same vuse
3826 and the same operands do not necessarily return the same
3827 value, unless they're pure or const. */
3828 gimple_call_flags (call_stmt
) & (ECF_PURE
| ECF_CONST
)
3829 /* If calls have a vdef, subsequent calls won't have
3830 the same incoming vuse. So, if 2 calls with vdef have the
3831 same vuse, we know they're not subsequent.
3832 We can value number 2 calls to the same function with the
3833 same vuse and the same operands which are not subsequent
3834 the same, because there is no code in the program that can
3835 compare the 2 values... */
3836 || (gimple_vdef (call_stmt
)
3837 /* ... unless the call returns a pointer which does
3838 not alias with anything else. In which case the
3839 information that the values are distinct are encoded
3841 && !(gimple_call_return_flags (call_stmt
) & ERF_NOALIAS
)
3842 /* Only perform the following when being called from PRE
3843 which embeds tail merging. */
3844 && default_vn_walk_kind
== VN_WALK
)))
3845 changed
= visit_reference_op_call (lhs
, call_stmt
);
3847 changed
= defs_to_varying (call_stmt
);
3850 changed
= defs_to_varying (stmt
);
3855 /* Compare two operands by reverse postorder index */
3858 compare_ops (const void *pa
, const void *pb
)
3860 const tree opa
= *((const tree
*)pa
);
3861 const tree opb
= *((const tree
*)pb
);
3862 gimple
*opstmta
= SSA_NAME_DEF_STMT (opa
);
3863 gimple
*opstmtb
= SSA_NAME_DEF_STMT (opb
);
3867 if (gimple_nop_p (opstmta
) && gimple_nop_p (opstmtb
))
3868 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3869 else if (gimple_nop_p (opstmta
))
3871 else if (gimple_nop_p (opstmtb
))
3874 bba
= gimple_bb (opstmta
);
3875 bbb
= gimple_bb (opstmtb
);
3878 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3886 if (gimple_code (opstmta
) == GIMPLE_PHI
3887 && gimple_code (opstmtb
) == GIMPLE_PHI
)
3888 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3889 else if (gimple_code (opstmta
) == GIMPLE_PHI
)
3891 else if (gimple_code (opstmtb
) == GIMPLE_PHI
)
3893 else if (gimple_uid (opstmta
) != gimple_uid (opstmtb
))
3894 return gimple_uid (opstmta
) - gimple_uid (opstmtb
);
3896 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3898 return rpo_numbers
[bba
->index
] - rpo_numbers
[bbb
->index
];
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
3954 /* Process a strongly connected component in the SSA graph. */
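/* A single-member SCC that is not a PHI is simply visited once.
   Larger SCCs are iterated with CURRENT_INFO pointing at the
   optimistic tables, which are cleared before each iteration, until no
   value number changes anymore; the surviving optimistic entries are
   then copied into the valid tables via copy_nary, copy_phi and
   copy_reference.  */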
3957 process_scc (vec
<tree
> scc
)
3961 unsigned int iterations
= 0;
3962 bool changed
= true;
3963 vn_nary_op_iterator_type hin
;
3964 vn_phi_iterator_type hip
;
3965 vn_reference_iterator_type hir
;
3970 /* If the SCC has a single member, just visit it. */
3971 if (scc
.length () == 1)
3974 if (VN_INFO (use
)->use_processed
)
3976 /* We need to make sure it doesn't form a cycle itself, which can
3977 happen for self-referential PHI nodes. In that case we would
3978 end up inserting an expression with VN_TOP operands into the
3979 valid table which makes us derive bogus equivalences later.
3980 The cheapest way to check this is to assume it for all PHI nodes. */
3981 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
3982 /* Fallthru to iteration. */ ;
3990 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3991 print_scc (dump_file
, scc
);
3993 /* Iterate over the SCC with the optimistic table until it stops
3995 current_info
= optimistic_info
;
4000 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4001 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
4002 /* As we are value-numbering optimistically we have to
4003 clear the expression tables and the simplified expressions
4004 in each iteration until we converge. */
4005 optimistic_info
->nary
->empty ();
4006 optimistic_info
->phis
->empty ();
4007 optimistic_info
->references
->empty ();
4008 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
4009 gcc_obstack_init (&optimistic_info
->nary_obstack
);
4010 optimistic_info
->phis_pool
->release ();
4011 optimistic_info
->references_pool
->release ();
4012 FOR_EACH_VEC_ELT (scc
, i
, var
)
4013 gcc_assert (!VN_INFO (var
)->needs_insertion
4014 && VN_INFO (var
)->expr
== NULL
);
4015 FOR_EACH_VEC_ELT (scc
, i
, var
)
4016 changed
|= visit_use (var
);
4019 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4020 fprintf (dump_file
, "Processing SCC needed %d iterations\n", iterations
);
4021 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
4023 /* Finally, copy the contents of the no longer used optimistic
4024 table to the valid table. */
4025 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->nary
, nary
, vn_nary_op_t
, hin
)
4026 copy_nary (nary
, valid_info
);
4027 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->phis
, phi
, vn_phi_t
, hip
)
4028 copy_phi (phi
, valid_info
);
4029 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->references
,
4030 ref
, vn_reference_t
, hir
)
4031 copy_reference (ref
, valid_info
);
4033 current_info
= valid_info
;
4037 /* Pop the components of the found SCC for NAME off the SCC stack
4038 and process them. Returns true if all went well, false if
4039 we run into resource limits. */
4042 extract_and_process_scc_for_name (tree name
)
4047 /* Found an SCC, pop the components off the SCC stack and
4051 x
= sccstack
.pop ();
4053 VN_INFO (x
)->on_sccstack
= false;
4055 } while (x
!= name
);
4057 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
4059 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
4062 fprintf (dump_file
, "WARNING: Giving up with SCCVN due to "
4063 "SCC size %u exceeding %u\n", scc
.length (),
4064 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
));
4069 if (scc
.length () > 1)
4077 /* Depth first search on NAME to discover and process SCC's in the SSA
4079 Execution of this algorithm relies on the fact that the SCC's are
4080 popped off the stack in topological order.
4081 Returns true if successful, false if we stopped processing SCC's due
4082 to resource constraints. */
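/* Instead of recursing for each use, the walk keeps an explicit stack
   of (name, use-iterator) pairs in NAMEVEC and ITERVEC, so deep SSA
   use-def chains cannot overflow the host stack; the dfsnum/low
   bookkeeping used to detect SCCs is updated as uses are popped.  */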
4087 vec
<ssa_op_iter
> itervec
= vNULL
;
4088 vec
<tree
> namevec
= vNULL
;
4089 use_operand_p usep
= NULL
;
4096 VN_INFO (name
)->dfsnum
= next_dfs_num
++;
4097 VN_INFO (name
)->visited
= true;
4098 VN_INFO (name
)->low
= VN_INFO (name
)->dfsnum
;
4100 sccstack
.safe_push (name
);
4101 VN_INFO (name
)->on_sccstack
= true;
4102 defstmt
= SSA_NAME_DEF_STMT (name
);
4104 /* Recursively DFS on our operands, looking for SCC's. */
4105 if (!gimple_nop_p (defstmt
))
4107 /* Push a new iterator. */
4108 if (gphi
*phi
= dyn_cast
<gphi
*> (defstmt
))
4109 usep
= op_iter_init_phiuse (&iter
, phi
, SSA_OP_ALL_USES
);
4111 usep
= op_iter_init_use (&iter
, defstmt
, SSA_OP_ALL_USES
);
4114 clear_and_done_ssa_iter (&iter
);
4118 /* If we are done processing uses of a name, go up the stack
4119 of iterators and process SCCs as we found them. */
4120 if (op_iter_done (&iter
))
4122 /* See if we found an SCC. */
4123 if (VN_INFO (name
)->low
== VN_INFO (name
)->dfsnum
)
4124 if (!extract_and_process_scc_for_name (name
))
4131 /* Check if we are done. */
4132 if (namevec
.is_empty ())
4139 /* Restore the last use walker and continue walking there. */
4141 name
= namevec
.pop ();
4142 memcpy (&iter
, &itervec
.last (),
4143 sizeof (ssa_op_iter
));
4145 goto continue_walking
;
4148 use
= USE_FROM_PTR (usep
);
4150 /* Since we handle phi nodes, we will sometimes get
4151 invariants in the use expression. */
4152 if (TREE_CODE (use
) == SSA_NAME
)
4154 if (! (VN_INFO (use
)->visited
))
4156 /* Recurse by pushing the current use walking state on
4157 the stack and starting over. */
4158 itervec
.safe_push (iter
);
4159 namevec
.safe_push (name
);
4164 VN_INFO (name
)->low
= MIN (VN_INFO (name
)->low
,
4165 VN_INFO (use
)->low
);
4167 if (VN_INFO (use
)->dfsnum
< VN_INFO (name
)->dfsnum
4168 && VN_INFO (use
)->on_sccstack
)
4170 VN_INFO (name
)->low
= MIN (VN_INFO (use
)->dfsnum
,
4171 VN_INFO (name
)->low
);
4175 usep
= op_iter_next_use (&iter
);
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}
4194 /* Free a value number table. */
4197 free_vn_table (vn_tables_t table
)
4203 delete table
->references
;
4204 table
->references
= NULL
;
4205 obstack_free (&table
->nary_obstack
, NULL
);
4206 delete table
->phis_pool
;
4207 delete table
->references_pool
;
4215 int *rpo_numbers_temp
;
4217 calculate_dominance_info (CDI_DOMINATORS
);
4218 mark_dfs_back_edges ();
4220 sccstack
.create (0);
4221 constant_to_value_id
= new hash_table
<vn_constant_hasher
> (23);
4223 constant_value_ids
= BITMAP_ALLOC (NULL
);
4228 vn_ssa_aux_table
.create (num_ssa_names
+ 1);
4229 /* VEC_alloc doesn't actually grow it to the right size, it just
4230 preallocates the space to do so. */
4231 vn_ssa_aux_table
.safe_grow_cleared (num_ssa_names
+ 1);
4232 gcc_obstack_init (&vn_ssa_aux_obstack
);
4234 shared_lookup_phiargs
.create (0);
4235 shared_lookup_references
.create (0);
4236 rpo_numbers
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
4238 XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
4239 pre_and_rev_post_order_compute (NULL
, rpo_numbers_temp
, false);
4241 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4242 the i'th block in RPO order is bb. We want to map bb's to RPO
4243 numbers, so we need to rearrange this array. */
4244 for (j
= 0; j
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; j
++)
4245 rpo_numbers
[rpo_numbers_temp
[j
]] = j
;
4247 XDELETE (rpo_numbers_temp
);
4249 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
4251 renumber_gimple_stmt_uids ();
4253 /* Create the valid and optimistic value numbering tables. */
4254 valid_info
= XCNEW (struct vn_tables_s
);
4255 allocate_vn_table (valid_info
);
4256 optimistic_info
= XCNEW (struct vn_tables_s
);
4257 allocate_vn_table (optimistic_info
);
4258 current_info
= valid_info
;
4260 /* Create the VN_INFO structures, and initialize value numbers to
4261 TOP or VARYING for parameters. */
4262 for (i
= 1; i
< num_ssa_names
; i
++)
4264 tree name
= ssa_name (i
);
4268 VN_INFO_GET (name
)->valnum
= VN_TOP
;
4269 VN_INFO (name
)->needs_insertion
= false;
4270 VN_INFO (name
)->expr
= NULL
;
4271 VN_INFO (name
)->value_id
= 0;
4273 if (!SSA_NAME_IS_DEFAULT_DEF (name
))
4276 switch (TREE_CODE (SSA_NAME_VAR (name
)))
4279 /* Undefined vars keep TOP. */
4283 /* Parameters are VARYING but we can record a condition
4284 if we know it is a non-NULL pointer. */
4285 VN_INFO (name
)->visited
= true;
4286 VN_INFO (name
)->valnum
= name
;
4287 if (POINTER_TYPE_P (TREE_TYPE (name
))
4288 && nonnull_arg_p (SSA_NAME_VAR (name
)))
4292 ops
[1] = build_int_cst (TREE_TYPE (name
), 0);
4293 vn_nary_op_insert_pieces (2, NE_EXPR
, boolean_type_node
, ops
,
4294 boolean_true_node
, 0);
4295 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4297 fprintf (dump_file
, "Recording ");
4298 print_generic_expr (dump_file
, name
, TDF_SLIM
);
4299 fprintf (dump_file
, " != 0\n");
4305 /* If the result is passed by invisible reference the default
4306 def is initialized, otherwise it's uninitialized. */
4307 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name
)))
4309 VN_INFO (name
)->visited
= true;
4310 VN_INFO (name
)->valnum
= name
;
4320 /* Restore SSA info that has been reset on value leaders. */
4323 scc_vn_restore_ssa_info (void)
4325 for (unsigned i
= 0; i
< num_ssa_names
; i
++)
4327 tree name
= ssa_name (i
);
4329 && has_VN_INFO (name
))
4331 if (VN_INFO (name
)->needs_insertion
)
4333 else if (POINTER_TYPE_P (TREE_TYPE (name
))
4334 && VN_INFO (name
)->info
.ptr_info
)
4335 SSA_NAME_PTR_INFO (name
) = VN_INFO (name
)->info
.ptr_info
;
4336 else if (INTEGRAL_TYPE_P (TREE_TYPE (name
))
4337 && VN_INFO (name
)->info
.range_info
)
4339 SSA_NAME_RANGE_INFO (name
) = VN_INFO (name
)->info
.range_info
;
4340 SSA_NAME_ANTI_RANGE_P (name
)
4341 = VN_INFO (name
)->range_info_anti_range_p
;
4352 delete constant_to_value_id
;
4353 constant_to_value_id
= NULL
;
4354 BITMAP_FREE (constant_value_ids
);
4355 shared_lookup_phiargs
.release ();
4356 shared_lookup_references
.release ();
4357 XDELETEVEC (rpo_numbers
);
4359 for (i
= 0; i
< num_ssa_names
; i
++)
4361 tree name
= ssa_name (i
);
4363 && has_VN_INFO (name
)
4364 && VN_INFO (name
)->needs_insertion
)
4365 release_ssa_name (name
);
4367 obstack_free (&vn_ssa_aux_obstack
, NULL
);
4368 vn_ssa_aux_table
.release ();
4370 sccstack
.release ();
4371 free_vn_table (valid_info
);
4372 XDELETE (valid_info
);
4373 free_vn_table (optimistic_info
);
4374 XDELETE (optimistic_info
);
4376 BITMAP_FREE (const_parms
);
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
4392 /* Set the value ids in the valid hash tables. */
4395 set_hashtable_value_ids (void)
4397 vn_nary_op_iterator_type hin
;
4398 vn_phi_iterator_type hip
;
4399 vn_reference_iterator_type hir
;
4404 /* Now set the value ids of the things we had put in the hash
4407 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->nary
, vno
, vn_nary_op_t
, hin
)
4408 set_value_id_for_result (vno
->result
, &vno
->value_id
);
4410 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->phis
, vp
, vn_phi_t
, hip
)
4411 set_value_id_for_result (vp
->result
, &vp
->value_id
);
4413 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->references
, vr
, vn_reference_t
,
4415 set_value_id_for_result (vr
->result
, &vr
->value_id
);
4418 class sccvn_dom_walker
: public dom_walker
4422 : dom_walker (CDI_DOMINATORS
, true), fail (false), cond_stack (vNULL
) {}
4423 ~sccvn_dom_walker ();
4425 virtual edge
before_dom_children (basic_block
);
4426 virtual void after_dom_children (basic_block
);
4428 void record_cond (basic_block
,
4429 enum tree_code code
, tree lhs
, tree rhs
, bool value
);
4430 void record_conds (basic_block
,
4431 enum tree_code code
, tree lhs
, tree rhs
, bool value
);
4434 vec
<std::pair
<basic_block
, std::pair
<vn_nary_op_t
, vn_nary_op_t
> > >
4438 sccvn_dom_walker::~sccvn_dom_walker ()
4440 cond_stack
.release ();
/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
                               enum tree_code code, tree lhs, tree rhs,
                               bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
                                value
                                ? boolean_true_node
                                : boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
               value ? "true" : "false",
               old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}
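
/* Illustrative note (added commentary, not part of the original file):
   record_cond pairs with after_dom_children below.  If an entry for the
   same condition already exists in the nary table it is removed and
   remembered as OLD so it can be reinstated once the walk leaves BB's
   dominance region; with detailed dumping this shows up as, e.g.,
   "Recording temporarily a_1 lt_expr b_2 == true (old entry saved)"
   (the SSA names here are hypothetical).  */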
/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
                                enum tree_code code, tree lhs, tree rhs,
                                bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}
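
/* Worked example (added commentary, not part of the original file):
   for a condition a_1 < b_2 known to be true, record_conds records,
   besides a_1 < b_2 == true itself,
     a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false,
   while for a_1 == b_2 == true it records
     a_1 <= b_2 == true, a_1 >= b_2 == true,
     a_1 < b_2 == false and a_1 > b_2 == false.
   Conditions known to be false only record the original condition;
   the caller records the inverted comparison separately.  */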
/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
         && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
        vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}
/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  if (fail)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = NULL;
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* Ignore simple backedges from this to allow recording conditions
         in loop headers.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;
      if (! pred_e)
        pred_e = e;
      else
        {
          pred_e = NULL;
          break;
        }
    }
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
         the source block.  Otherwise there is no additional info
         to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
        if (e2 != pred_e
            && e2->flags & EDGE_EXECUTABLE)
          break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
        {
          gimple *stmt = last_stmt (pred_e->src);
          if (stmt
              && gimple_code (stmt) == GIMPLE_COND)
            {
              enum tree_code code = gimple_cond_code (stmt);
              tree lhs = gimple_cond_lhs (stmt);
              tree rhs = gimple_cond_rhs (stmt);
              record_conds (bb, code, lhs, rhs,
                            (pred_e->flags & EDGE_TRUE_VALUE) != 0);
              code = invert_tree_comparison (code, HONOR_NANS (lhs));
              if (code != ERROR_MARK)
                record_conds (bb, code, lhs, rhs,
                              (pred_e->flags & EDGE_TRUE_VALUE) == 0);
            }
        }
    }
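
  /* Example (added commentary, not part of the original file): if BB's
     only non-backedge predecessor edge is the true edge of
       if (a_1 < b_2)
     then a_1 < b_2 (and its derived conditions) are recorded as true
     and the inverted comparison a_1 >= b_2 as false while BB and the
     blocks it dominates are value numbered.  */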
  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
          && !DFS (res))
        {
          fail = true;
          return NULL;
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
        if (!VN_INFO (op)->visited
            && !DFS (op))
          {
            fail = true;
            return NULL;
          }
    }
  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
        tree lhs = vn_valueize (gimple_cond_lhs (stmt));
        tree rhs = vn_valueize (gimple_cond_rhs (stmt));
        val = gimple_simplify (gimple_cond_code (stmt),
                               boolean_type_node, lhs, rhs,
                               NULL, vn_valueize);
        /* If that didn't simplify to a constant see if we have recorded
           temporary expressions from taken edges.  */
        if (!val || TREE_CODE (val) != INTEGER_CST)
          {
            tree ops[2];
            ops[0] = lhs;
            ops[1] = rhs;
            val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
                                            boolean_type_node, ops, NULL);
          }
        break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
             "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
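
/* Note (added commentary, not part of the original file): returning the
   taken edge lets the dom walker mark BB's other outgoing edges as not
   executable, so blocks that thereby become unreachable are skipped by
   the walk (the walker is constructed with skip-unreachable enabled).  */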
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
                                  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      unsigned i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
           arg; arg = DECL_CHAIN (arg), ++i)
        {
          if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
            break;
          if (TREE_STRING_POINTER (fnspec)[i] == 'R'
              || TREE_STRING_POINTER (fnspec)[i] == 'r')
            {
              tree name = ssa_default_def (cfun, arg);
              if (name)
                bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
            }
        }
    }
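
  /* Example (added commentary, not part of the original file): an
     internal "fn spec" attribute string such as, hypothetically, ".R"
     describes the return value in its first character and one parameter
     per following character; 'R' or 'r' means the memory the parameter
     points to is not written by the function, so the loop above marks
     that parameter's default definition as pointing to readonly memory
     for the alias walks done during value numbering.  */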
  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }
  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (!info->visited)
        info->valnum = name;
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
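
/* Note (added commentary, not part of the original file): treating
   VN_TOP as equal to anything is the optimistic assumption for values
   not yet computed on a cycle; if the assumption turns out to be wrong
   the SCC is simply iterated again with the corrected value numbers.  */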
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))