/* SCC value numbering for trees
   Copyright (C) 2006-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it does not change).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
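/* Illustrative sketch (added commentary, not part of the original GCC
   sources): a minimal, self-contained model of the iterative hash-based
   value numbering described above, applied to a toy cycle of mutually
   dependent names.  Every name below (toy_expr, value_number_scc, the
   strings "x"/"y"/"phi") is hypothetical and only demonstrates iterating
   an optimistic table until the name->number mapping stops changing;
   GCC's real implementation follows in the rest of this file.  The loop
   converges because a name's number only changes when its valueized
   expression re-hashes differently, which happens a bounded number of
   times.  */
#if 0
#include <cstdio>
#include <map>
#include <string>
#include <vector>

/* A toy "SSA name" definition: an opcode plus operand names.  */
struct toy_expr
{
  std::string opcode;
  std::vector<std::string> operands;
};

/* Optimistically value-number the names defined in DEFS until the
   name->value-number mapping VALNUM reaches a fixed point.  Definitions
   whose valueized operand lists agree receive the same number.  */
static void
value_number_scc (const std::map<std::string, toy_expr> &defs,
		  std::map<std::string, int> &valnum)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Fresh (optimistic) table for this iteration:
	 expression key -> value number.  */
      std::map<std::string, int> table;
      int next = 0;
      for (const auto &d : defs)
	{
	  /* Key the expression on its opcode and the current value
	     numbers of its operands (the "valueized" form).  */
	  std::string key = d.second.opcode;
	  for (const std::string &op : d.second.operands)
	    key += ":" + std::to_string (valnum[op]);
	  auto it = table.find (key);
	  int vn = it != table.end () ? it->second : (table[key] = next++);
	  if (valnum[d.first] != vn)
	    {
	      valnum[d.first] = vn;
	      changed = true;
	    }
	}
    }
}

int
main ()
{
  /* Two phi-like names that feed each other, as in a loop; they start
     out optimistically equal and the iteration confirms it.  */
  std::map<std::string, toy_expr> defs;
  defs["x"] = { "phi", { "y" } };
  defs["y"] = { "phi", { "x" } };
  std::map<std::string, int> valnum;
  valnum["x"] = 0;
  valnum["y"] = 0;
  value_number_scc (defs, valnum);
  std::printf ("x -> %d, y -> %d\n", valnum["x"], valnum["y"]);
  return 0;
}
#endif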
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
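/* (Added note, not from the original sources: lookups and insertions hash
   the operation first and then probe the matching table, along the lines
   of "current_info->nary->find_slot_with_hash (vno, vno->hashcode,
   NO_INSERT)"; the concrete lookup and insert routines appear further
   down in this file.)  */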
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}
378 /* Return the value numbering information for a given SSA name. */
383 vn_ssa_aux_t res
= vn_ssa_aux_table
[SSA_NAME_VERSION (name
)];
384 gcc_checking_assert (res
);
388 /* Set the value numbering info for a given SSA name to a given
392 VN_INFO_SET (tree name
, vn_ssa_aux_t value
)
394 vn_ssa_aux_table
[SSA_NAME_VERSION (name
)] = value
;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
401 VN_INFO_GET (tree name
)
403 vn_ssa_aux_t newinfo
;
405 gcc_assert (SSA_NAME_VERSION (name
) >= vn_ssa_aux_table
.length ()
406 || vn_ssa_aux_table
[SSA_NAME_VERSION (name
)] == NULL
);
407 newinfo
= XOBNEW (&vn_ssa_aux_obstack
, struct vn_ssa_aux
);
408 memset (newinfo
, 0, sizeof (struct vn_ssa_aux
));
409 if (SSA_NAME_VERSION (name
) >= vn_ssa_aux_table
.length ())
410 vn_ssa_aux_table
.safe_grow_cleared (SSA_NAME_VERSION (name
) + 1);
411 vn_ssa_aux_table
[SSA_NAME_VERSION (name
)] = newinfo
;
416 /* Return the vn_kind the expression computed by the stmt should be
420 vn_get_stmt_kind (gimple
*stmt
)
422 switch (gimple_code (stmt
))
430 enum tree_code code
= gimple_assign_rhs_code (stmt
);
431 tree rhs1
= gimple_assign_rhs1 (stmt
);
432 switch (get_gimple_rhs_class (code
))
434 case GIMPLE_UNARY_RHS
:
435 case GIMPLE_BINARY_RHS
:
436 case GIMPLE_TERNARY_RHS
:
438 case GIMPLE_SINGLE_RHS
:
439 switch (TREE_CODE_CLASS (code
))
442 /* VOP-less references can go through unary case. */
443 if ((code
== REALPART_EXPR
444 || code
== IMAGPART_EXPR
445 || code
== VIEW_CONVERT_EXPR
446 || code
== BIT_FIELD_REF
)
447 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
451 case tcc_declaration
:
458 if (code
== ADDR_EXPR
)
459 return (is_gimple_min_invariant (rhs1
)
460 ? VN_CONSTANT
: VN_REFERENCE
);
461 else if (code
== CONSTRUCTOR
)
474 /* Lookup a value id for CONSTANT and return it. If it does not
478 get_constant_value_id (tree constant
)
480 vn_constant_s
**slot
;
481 struct vn_constant_s vc
;
483 vc
.hashcode
= vn_hash_constant_with_type (constant
);
484 vc
.constant
= constant
;
485 slot
= constant_to_value_id
->find_slot (&vc
, NO_INSERT
);
487 return (*slot
)->value_id
;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
495 get_or_alloc_constant_value_id (tree constant
)
497 vn_constant_s
**slot
;
498 struct vn_constant_s vc
;
501 vc
.hashcode
= vn_hash_constant_with_type (constant
);
502 vc
.constant
= constant
;
503 slot
= constant_to_value_id
->find_slot (&vc
, INSERT
);
505 return (*slot
)->value_id
;
507 vcp
= XNEW (struct vn_constant_s
);
508 vcp
->hashcode
= vc
.hashcode
;
509 vcp
->constant
= constant
;
510 vcp
->value_id
= get_next_value_id ();
512 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
513 return vcp
->value_id
;
516 /* Return true if V is a value id for a constant. */
519 value_id_constant_p (unsigned int v
)
521 return bitmap_bit_p (constant_value_ids
, v
);
524 /* Compute the hash for a reference operand VRO1. */
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, inchash::hash
&hstate
)
529 hstate
.add_int (vro1
->opcode
);
531 inchash::add_expr (vro1
->op0
, hstate
);
533 inchash::add_expr (vro1
->op1
, hstate
);
535 inchash::add_expr (vro1
->op2
, hstate
);
538 /* Compute a hash for the reference operation VR1 and return it. */
541 vn_reference_compute_hash (const vn_reference_t vr1
)
543 inchash::hash hstate
;
546 vn_reference_op_t vro
;
547 HOST_WIDE_INT off
= -1;
550 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
552 if (vro
->opcode
== MEM_REF
)
554 else if (vro
->opcode
!= ADDR_EXPR
)
566 hstate
.add_int (off
);
569 && vro
->opcode
== ADDR_EXPR
)
573 tree op
= TREE_OPERAND (vro
->op0
, 0);
574 hstate
.add_int (TREE_CODE (op
));
575 inchash::add_expr (op
, hstate
);
579 vn_reference_op_compute_hash (vro
, hstate
);
582 result
= hstate
.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
585 result
+= SSA_NAME_VERSION (vr1
->vuse
);
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
594 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
598 /* Early out if this is not a hash collision. */
599 if (vr1
->hashcode
!= vr2
->hashcode
)
602 /* The VOP needs to be the same. */
603 if (vr1
->vuse
!= vr2
->vuse
)
606 /* If the operands are the same we are done. */
607 if (vr1
->operands
== vr2
->operands
)
610 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
613 if (INTEGRAL_TYPE_P (vr1
->type
)
614 && INTEGRAL_TYPE_P (vr2
->type
))
616 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
619 else if (INTEGRAL_TYPE_P (vr1
->type
)
620 && (TYPE_PRECISION (vr1
->type
)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
623 else if (INTEGRAL_TYPE_P (vr2
->type
)
624 && (TYPE_PRECISION (vr2
->type
)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
632 HOST_WIDE_INT off1
= 0, off2
= 0;
633 vn_reference_op_t vro1
, vro2
;
634 vn_reference_op_s tem1
, tem2
;
635 bool deref1
= false, deref2
= false;
636 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
638 if (vro1
->opcode
== MEM_REF
)
640 /* Do not look through a storage order barrier. */
641 else if (vro1
->opcode
== VIEW_CONVERT_EXPR
&& vro1
->reverse
)
647 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
649 if (vro2
->opcode
== MEM_REF
)
651 /* Do not look through a storage order barrier. */
652 else if (vro2
->opcode
== VIEW_CONVERT_EXPR
&& vro2
->reverse
)
660 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
662 memset (&tem1
, 0, sizeof (tem1
));
663 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
664 tem1
.type
= TREE_TYPE (tem1
.op0
);
665 tem1
.opcode
= TREE_CODE (tem1
.op0
);
669 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
671 memset (&tem2
, 0, sizeof (tem2
));
672 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
673 tem2
.type
= TREE_TYPE (tem2
.op0
);
674 tem2
.opcode
= TREE_CODE (tem2
.op0
);
678 if (deref1
!= deref2
)
680 if (!vn_reference_op_eq (vro1
, vro2
))
685 while (vr1
->operands
.length () != i
686 || vr2
->operands
.length () != j
);
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
695 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
697 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
699 vn_reference_op_s temp
;
703 memset (&temp
, 0, sizeof (temp
));
704 temp
.type
= TREE_TYPE (ref
);
705 temp
.opcode
= TREE_CODE (ref
);
706 temp
.op0
= TMR_INDEX (ref
);
707 temp
.op1
= TMR_STEP (ref
);
708 temp
.op2
= TMR_OFFSET (ref
);
710 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
711 temp
.base
= MR_DEPENDENCE_BASE (ref
);
712 result
->quick_push (temp
);
714 memset (&temp
, 0, sizeof (temp
));
715 temp
.type
= NULL_TREE
;
716 temp
.opcode
= ERROR_MARK
;
717 temp
.op0
= TMR_INDEX2 (ref
);
719 result
->quick_push (temp
);
721 memset (&temp
, 0, sizeof (temp
));
722 temp
.type
= NULL_TREE
;
723 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
724 temp
.op0
= TMR_BASE (ref
);
726 result
->quick_push (temp
);
730 /* For non-calls, store the information that makes up the address. */
734 vn_reference_op_s temp
;
736 memset (&temp
, 0, sizeof (temp
));
737 temp
.type
= TREE_TYPE (ref
);
738 temp
.opcode
= TREE_CODE (ref
);
744 temp
.op0
= TREE_OPERAND (ref
, 1);
747 temp
.op0
= TREE_OPERAND (ref
, 1);
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp
.op0
= TREE_OPERAND (ref
, 1);
754 offset_int off
= mem_ref_offset (ref
);
755 if (wi::fits_shwi_p (off
))
756 temp
.off
= off
.to_shwi ();
758 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
759 temp
.base
= MR_DEPENDENCE_BASE (ref
);
760 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
763 /* Record bits, position and storage order. */
764 temp
.op0
= TREE_OPERAND (ref
, 1);
765 temp
.op1
= TREE_OPERAND (ref
, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 2)))
768 HOST_WIDE_INT off
= tree_to_shwi (TREE_OPERAND (ref
, 2));
769 if (off
% BITS_PER_UNIT
== 0)
770 temp
.off
= off
/ BITS_PER_UNIT
;
772 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
775 /* The field decl is enough to unambiguously specify the field,
776 a matching type is not necessary and a mismatching type
777 is always a spurious difference. */
778 temp
.type
= NULL_TREE
;
779 temp
.op0
= TREE_OPERAND (ref
, 1);
780 temp
.op1
= TREE_OPERAND (ref
, 2);
782 tree this_offset
= component_ref_field_offset (ref
);
784 && TREE_CODE (this_offset
) == INTEGER_CST
)
786 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
787 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
790 = (wi::to_offset (this_offset
)
791 + wi::lrshift (wi::to_offset (bit_offset
),
792 LOG2_BITS_PER_UNIT
));
793 if (wi::fits_shwi_p (off
)
794 /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
799 && (TREE_CODE (orig
) != ADDR_EXPR
801 || cfun
->after_inlining
))
802 temp
.off
= off
.to_shwi ();
807 case ARRAY_RANGE_REF
:
809 /* Record index as operand. */
810 temp
.op0
= TREE_OPERAND (ref
, 1);
811 /* Always record lower bounds and element size. */
812 temp
.op1
= array_ref_low_bound (ref
);
813 temp
.op2
= array_ref_element_size (ref
);
814 if (TREE_CODE (temp
.op0
) == INTEGER_CST
815 && TREE_CODE (temp
.op1
) == INTEGER_CST
816 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
818 offset_int off
= ((wi::to_offset (temp
.op0
)
819 - wi::to_offset (temp
.op1
))
820 * wi::to_offset (temp
.op2
));
821 if (wi::fits_shwi_p (off
))
822 temp
.off
= off
.to_shwi();
826 if (DECL_HARD_REGISTER (ref
))
835 /* Canonicalize decls to MEM[&decl] which is what we end up with
836 when valueizing MEM[ptr] with ptr = &decl. */
837 temp
.opcode
= MEM_REF
;
838 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
840 result
->safe_push (temp
);
841 temp
.opcode
= ADDR_EXPR
;
842 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
843 temp
.type
= TREE_TYPE (temp
.op0
);
857 if (is_gimple_min_invariant (ref
))
863 /* These are only interesting for their operands, their
864 existence, and their type. They will never be the last
865 ref in the chain of references (IE they require an
866 operand), so we don't have to put anything
867 for op* as it will be handled by the iteration */
871 case VIEW_CONVERT_EXPR
:
873 temp
.reverse
= storage_order_barrier_p (ref
);
876 /* This is only interesting for its constant offset. */
877 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
882 result
->safe_push (temp
);
884 if (REFERENCE_CLASS_P (ref
)
885 || TREE_CODE (ref
) == MODIFY_EXPR
886 || TREE_CODE (ref
) == WITH_SIZE_EXPR
887 || (TREE_CODE (ref
) == ADDR_EXPR
888 && !is_gimple_min_invariant (ref
)))
889 ref
= TREE_OPERAND (ref
, 0);
895 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
896 operands in *OPS, the reference alias set SET and the reference type TYPE.
897 Return true if something useful was produced. */
900 ao_ref_init_from_vn_reference (ao_ref
*ref
,
901 alias_set_type set
, tree type
,
902 vec
<vn_reference_op_s
> ops
)
904 vn_reference_op_t op
;
906 tree base
= NULL_TREE
;
908 offset_int offset
= 0;
910 offset_int size
= -1;
911 tree size_tree
= NULL_TREE
;
912 alias_set_type base_alias_set
= -1;
914 /* First get the final access size from just the outermost expression. */
916 if (op
->opcode
== COMPONENT_REF
)
917 size_tree
= DECL_SIZE (op
->op0
);
918 else if (op
->opcode
== BIT_FIELD_REF
)
922 machine_mode mode
= TYPE_MODE (type
);
924 size_tree
= TYPE_SIZE (type
);
926 size
= int (GET_MODE_BITSIZE (mode
));
928 if (size_tree
!= NULL_TREE
929 && TREE_CODE (size_tree
) == INTEGER_CST
)
930 size
= wi::to_offset (size_tree
);
932 /* Initially, maxsize is the same as the accessed element size.
933 In the following it will only grow (or become -1). */
936 /* Compute cumulative bit-offset for nested component-refs and array-refs,
937 and find the ultimate containing object. */
938 FOR_EACH_VEC_ELT (ops
, i
, op
)
942 /* These may be in the reference ops, but we cannot do anything
943 sensible with them here. */
945 /* Apart from ADDR_EXPR arguments to MEM_REF. */
946 if (base
!= NULL_TREE
947 && TREE_CODE (base
) == MEM_REF
949 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
951 vn_reference_op_t pop
= &ops
[i
-1];
952 base
= TREE_OPERAND (op
->op0
, 0);
959 offset
+= pop
->off
* BITS_PER_UNIT
;
967 /* Record the base objects. */
969 base_alias_set
= get_deref_alias_set (op
->op0
);
970 *op0_p
= build2 (MEM_REF
, op
->type
,
972 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
973 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
974 op0_p
= &TREE_OPERAND (*op0_p
, 0);
985 /* And now the usual component-reference style ops. */
987 offset
+= wi::to_offset (op
->op1
);
992 tree field
= op
->op0
;
993 /* We do not have a complete COMPONENT_REF tree here so we
994 cannot use component_ref_field_offset. Do the interesting
996 tree this_offset
= DECL_FIELD_OFFSET (field
);
998 if (op
->op1
|| TREE_CODE (this_offset
) != INTEGER_CST
)
1002 offset_int woffset
= wi::lshift (wi::to_offset (this_offset
),
1003 LOG2_BITS_PER_UNIT
);
1004 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1010 case ARRAY_RANGE_REF
:
1012 /* We recorded the lower bound and the element size. */
1013 if (TREE_CODE (op
->op0
) != INTEGER_CST
1014 || TREE_CODE (op
->op1
) != INTEGER_CST
1015 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1020 = wi::sext (wi::to_offset (op
->op0
) - wi::to_offset (op
->op1
),
1021 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1022 woffset
*= wi::to_offset (op
->op2
);
1023 woffset
= wi::lshift (woffset
, LOG2_BITS_PER_UNIT
);
1035 case VIEW_CONVERT_EXPR
:
1052 if (base
== NULL_TREE
)
1055 ref
->ref
= NULL_TREE
;
1057 ref
->ref_alias_set
= set
;
1058 if (base_alias_set
!= -1)
1059 ref
->base_alias_set
= base_alias_set
;
1061 ref
->base_alias_set
= get_alias_set (base
);
1062 /* We discount volatiles from value-numbering elsewhere. */
1063 ref
->volatile_p
= false;
1065 if (!wi::fits_shwi_p (size
) || wi::neg_p (size
))
1073 ref
->size
= size
.to_shwi ();
1075 if (!wi::fits_shwi_p (offset
))
1082 ref
->offset
= offset
.to_shwi ();
1084 if (!wi::fits_shwi_p (max_size
) || wi::neg_p (max_size
))
1087 ref
->max_size
= max_size
.to_shwi ();
1092 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1093 vn_reference_op_s's. */
1096 copy_reference_ops_from_call (gcall
*call
,
1097 vec
<vn_reference_op_s
> *result
)
1099 vn_reference_op_s temp
;
1101 tree lhs
= gimple_call_lhs (call
);
1104 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1105 different. By adding the lhs here in the vector, we ensure that the
1106 hashcode is different, guaranteeing a different value number. */
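/* (Added example, not part of the original comment: with a non-SSA lhs,
   "a = foo ();" and "b = foo ();" look like identical calls, but pushing
   the distinct lhs keeps their vdef value numbers apart.)  */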
1107 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1109 memset (&temp
, 0, sizeof (temp
));
1110 temp
.opcode
= MODIFY_EXPR
;
1111 temp
.type
= TREE_TYPE (lhs
);
1114 result
->safe_push (temp
);
1117 /* Copy the type, opcode, function, static chain and EH region, if any. */
1118 memset (&temp
, 0, sizeof (temp
));
1119 temp
.type
= gimple_call_return_type (call
);
1120 temp
.opcode
= CALL_EXPR
;
1121 temp
.op0
= gimple_call_fn (call
);
1122 temp
.op1
= gimple_call_chain (call
);
1123 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1124 temp
.op2
= size_int (lr
);
1126 if (gimple_call_with_bounds_p (call
))
1127 temp
.with_bounds
= 1;
1128 result
->safe_push (temp
);
1130 /* Copy the call arguments. As they can be references as well,
1131 just chain them together. */
1132 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1134 tree callarg
= gimple_call_arg (call
, i
);
1135 copy_reference_ops_from_ref (callarg
, result
);
1139 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1140 *I_P to point to the last element of the replacement. */
1142 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1145 unsigned int i
= *i_p
;
1146 vn_reference_op_t op
= &(*ops
)[i
];
1147 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1149 HOST_WIDE_INT addr_offset
= 0;
1151 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1152 from .foo.bar to the preceding MEM_REF offset and replace the
1153 address with &OBJ. */
1154 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1156 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1157 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1159 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1161 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1162 op
->op0
= build_fold_addr_expr (addr_base
);
1163 if (tree_fits_shwi_p (mem_op
->op0
))
1164 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1175 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1178 unsigned int i
= *i_p
;
1179 vn_reference_op_t op
= &(*ops
)[i
];
1180 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1182 enum tree_code code
;
1185 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1186 if (!is_gimple_assign (def_stmt
))
1189 code
= gimple_assign_rhs_code (def_stmt
);
1190 if (code
!= ADDR_EXPR
1191 && code
!= POINTER_PLUS_EXPR
)
1194 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1196 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1197 from .foo.bar to the preceding MEM_REF offset and replace the
1198 address with &OBJ. */
1199 if (code
== ADDR_EXPR
)
1201 tree addr
, addr_base
;
1202 HOST_WIDE_INT addr_offset
;
1204 addr
= gimple_assign_rhs1 (def_stmt
);
1205 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1207 /* If that didn't work because the address isn't invariant propagate
1208 the reference tree from the address operation in case the current
1209 dereference isn't offsetted. */
1211 && *i_p
== ops
->length () - 1
1213 /* This makes us disable this transform for PRE where the
1214 reference ops might also be used for code insertion which
1216 && default_vn_walk_kind
== VN_WALKREWRITE
)
1218 auto_vec
<vn_reference_op_s
, 32> tem
;
1219 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1222 ops
->safe_splice (tem
);
1227 || TREE_CODE (addr_base
) != MEM_REF
)
1231 off
+= mem_ref_offset (addr_base
);
1232 op
->op0
= TREE_OPERAND (addr_base
, 0);
1237 ptr
= gimple_assign_rhs1 (def_stmt
);
1238 ptroff
= gimple_assign_rhs2 (def_stmt
);
1239 if (TREE_CODE (ptr
) != SSA_NAME
1240 || TREE_CODE (ptroff
) != INTEGER_CST
)
1243 off
+= wi::to_offset (ptroff
);
1247 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1248 if (tree_fits_shwi_p (mem_op
->op0
))
1249 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1252 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1253 op
->op0
= SSA_VAL (op
->op0
);
1254 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1255 op
->opcode
= TREE_CODE (op
->op0
);
1258 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1259 vn_reference_maybe_forwprop_address (ops
, i_p
);
1260 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1261 vn_reference_fold_indirect (ops
, i_p
);
1265 /* Optimize the reference REF to a constant if possible or return
1266 NULL_TREE if not. */
1269 fully_constant_vn_reference_p (vn_reference_t ref
)
1271 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1272 vn_reference_op_t op
;
1274 /* Try to simplify the translated expression if it is
1275 a call to a builtin function with at most two arguments. */
1277 if (op
->opcode
== CALL_EXPR
1278 && TREE_CODE (op
->op0
) == ADDR_EXPR
1279 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1280 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1281 && operands
.length () >= 2
1282 && operands
.length () <= 3)
1284 vn_reference_op_t arg0
, arg1
= NULL
;
1285 bool anyconst
= false;
1286 arg0
= &operands
[1];
1287 if (operands
.length () > 2)
1288 arg1
= &operands
[2];
1289 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1290 || (arg0
->opcode
== ADDR_EXPR
1291 && is_gimple_min_invariant (arg0
->op0
)))
1294 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1295 || (arg1
->opcode
== ADDR_EXPR
1296 && is_gimple_min_invariant (arg1
->op0
))))
1300 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1303 arg1
? arg1
->op0
: NULL
);
1305 && TREE_CODE (folded
) == NOP_EXPR
)
1306 folded
= TREE_OPERAND (folded
, 0);
1308 && is_gimple_min_invariant (folded
))
1313 /* Simplify reads from constants or constant initializers. */
1314 else if (BITS_PER_UNIT
== 8
1315 && is_gimple_reg_type (ref
->type
)
1316 && (!INTEGRAL_TYPE_P (ref
->type
)
1317 || TYPE_PRECISION (ref
->type
) % BITS_PER_UNIT
== 0))
1319 HOST_WIDE_INT off
= 0;
1321 if (INTEGRAL_TYPE_P (ref
->type
))
1322 size
= TYPE_PRECISION (ref
->type
);
1324 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1325 if (size
% BITS_PER_UNIT
!= 0
1326 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1328 size
/= BITS_PER_UNIT
;
1330 for (i
= 0; i
< operands
.length (); ++i
)
1332 if (operands
[i
].off
== -1)
1334 off
+= operands
[i
].off
;
1335 if (operands
[i
].opcode
== MEM_REF
)
1341 vn_reference_op_t base
= &operands
[--i
];
1342 tree ctor
= error_mark_node
;
1343 tree decl
= NULL_TREE
;
1344 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1346 else if (base
->opcode
== MEM_REF
1347 && base
[1].opcode
== ADDR_EXPR
1348 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1349 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
))
1351 decl
= TREE_OPERAND (base
[1].op0
, 0);
1352 ctor
= ctor_for_folding (decl
);
1354 if (ctor
== NULL_TREE
)
1355 return build_zero_cst (ref
->type
);
1356 else if (ctor
!= error_mark_node
)
1360 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1361 off
* BITS_PER_UNIT
,
1362 size
* BITS_PER_UNIT
, decl
);
1365 STRIP_USELESS_TYPE_CONVERSION (res
);
1366 if (is_gimple_min_invariant (res
))
1372 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1373 int len
= native_encode_expr (ctor
, buf
, size
, off
);
1375 return native_interpret_expr (ref
->type
, buf
, len
);
1383 /* Return true if OPS contain a storage order barrier. */
1386 contains_storage_order_barrier_p (vec
<vn_reference_op_s
> ops
)
1388 vn_reference_op_t op
;
1391 FOR_EACH_VEC_ELT (ops
, i
, op
)
1392 if (op
->opcode
== VIEW_CONVERT_EXPR
&& op
->reverse
)
1398 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1399 structures into their value numbers. This is done in-place, and
1400 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1401 whether any operands were valueized. */
1403 static vec
<vn_reference_op_s
>
1404 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1406 vn_reference_op_t vro
;
1409 *valueized_anything
= false;
1411 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1413 if (vro
->opcode
== SSA_NAME
1414 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1416 tree tem
= SSA_VAL (vro
->op0
);
1417 if (tem
!= vro
->op0
)
1419 *valueized_anything
= true;
1422 /* If it transforms from an SSA_NAME to a constant, update
1424 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1425 vro
->opcode
= TREE_CODE (vro
->op0
);
1427 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1429 tree tem
= SSA_VAL (vro
->op1
);
1430 if (tem
!= vro
->op1
)
1432 *valueized_anything
= true;
1436 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1438 tree tem
= SSA_VAL (vro
->op2
);
1439 if (tem
!= vro
->op2
)
1441 *valueized_anything
= true;
1445 /* If it transforms from an SSA_NAME to an address, fold with
1446 a preceding indirect reference. */
1449 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1450 && orig
[i
- 1].opcode
== MEM_REF
)
1452 if (vn_reference_fold_indirect (&orig
, &i
))
1453 *valueized_anything
= true;
1456 && vro
->opcode
== SSA_NAME
1457 && orig
[i
- 1].opcode
== MEM_REF
)
1459 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1460 *valueized_anything
= true;
1462 /* If it transforms a non-constant ARRAY_REF into a constant
1463 one, adjust the constant offset. */
1464 else if (vro
->opcode
== ARRAY_REF
1466 && TREE_CODE (vro
->op0
) == INTEGER_CST
1467 && TREE_CODE (vro
->op1
) == INTEGER_CST
1468 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1470 offset_int off
= ((wi::to_offset (vro
->op0
)
1471 - wi::to_offset (vro
->op1
))
1472 * wi::to_offset (vro
->op2
));
1473 if (wi::fits_shwi_p (off
))
1474 vro
->off
= off
.to_shwi ();
1481 static vec
<vn_reference_op_s
>
1482 valueize_refs (vec
<vn_reference_op_s
> orig
)
1485 return valueize_refs_1 (orig
, &tem
);
1488 static vec
<vn_reference_op_s
> shared_lookup_references
;
1490 /* Create a vector of vn_reference_op_s structures from REF, a
1491 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1492 this function. *VALUEIZED_ANYTHING will specify whether any
1493 operands were valueized. */
1495 static vec
<vn_reference_op_s
>
1496 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1500 shared_lookup_references
.truncate (0);
1501 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1502 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1503 valueized_anything
);
1504 return shared_lookup_references
;
1507 /* Create a vector of vn_reference_op_s structures from CALL, a
1508 call statement. The vector is shared among all callers of
1511 static vec
<vn_reference_op_s
>
1512 valueize_shared_reference_ops_from_call (gcall
*call
)
1516 shared_lookup_references
.truncate (0);
1517 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1518 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1519 return shared_lookup_references
;
1522 /* Lookup an SCCVN reference operation VR in the current hash table.
1523 Returns the resulting value number if it exists in the hash table,
1524 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1525 vn_reference_t stored in the hashtable if something is found. */
1528 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1530 vn_reference_s
**slot
;
1533 hash
= vr
->hashcode
;
1534 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1535 if (!slot
&& current_info
== optimistic_info
)
1536 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1540 *vnresult
= (vn_reference_t
)*slot
;
1541 return ((vn_reference_t
)*slot
)->result
;
1547 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1548 with the current VUSE and performs the expression lookup. */
1551 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1552 unsigned int cnt
, void *vr_
)
1554 vn_reference_t vr
= (vn_reference_t
)vr_
;
1555 vn_reference_s
**slot
;
1558 /* This bounds the stmt walks we perform on reference lookups
1559 to O(1) instead of O(N) where N is the number of dominating
1561 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1565 *last_vuse_ptr
= vuse
;
1567 /* Fixup vuse and hash. */
1569 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1570 vr
->vuse
= vuse_ssa_val (vuse
);
1572 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1574 hash
= vr
->hashcode
;
1575 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1576 if (!slot
&& current_info
== optimistic_info
)
1577 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1584 /* Lookup an existing or insert a new vn_reference entry into the
1585 value table for the VUSE, SET, TYPE, OPERANDS reference which
1586 has the value VALUE which is either a constant or an SSA name. */
1588 static vn_reference_t
1589 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1592 vec
<vn_reference_op_s
,
1597 vn_reference_t result
;
1600 vr1
.operands
= operands
;
1603 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1604 if (vn_reference_lookup_1 (&vr1
, &result
))
1606 if (TREE_CODE (value
) == SSA_NAME
)
1607 value_id
= VN_INFO (value
)->value_id
;
1609 value_id
= get_or_alloc_constant_value_id (value
);
1610 return vn_reference_insert_pieces (vuse
, set
, type
,
1611 operands
.copy (), value
, value_id
);
1614 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1615 from the statement defining VUSE and if not successful tries to
1616 translate *REFP and VR_ through an aggregate copy at the definition
1617 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1618 of *REF and *VR. If only disambiguation was performed then
1619 *DISAMBIGUATE_ONLY is set to true. */
1622 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1623 bool *disambiguate_only
)
1625 vn_reference_t vr
= (vn_reference_t
)vr_
;
1626 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1627 tree base
= ao_ref_base (ref
);
1628 HOST_WIDE_INT offset
, maxsize
;
1629 static vec
<vn_reference_op_s
>
1632 bool lhs_ref_ok
= false;
1634 /* If the reference is based on a parameter that was determined as
1635 pointing to readonly memory it doesn't change. */
1636 if (TREE_CODE (base
) == MEM_REF
1637 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
1638 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0))
1639 && bitmap_bit_p (const_parms
,
1640 SSA_NAME_VERSION (TREE_OPERAND (base
, 0))))
1642 *disambiguate_only
= true;
1646 /* First try to disambiguate after value-replacing in the definitions LHS. */
1647 if (is_gimple_assign (def_stmt
))
1649 tree lhs
= gimple_assign_lhs (def_stmt
);
1650 bool valueized_anything
= false;
1651 /* Avoid re-allocation overhead. */
1652 lhs_ops
.truncate (0);
1653 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1654 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1655 if (valueized_anything
)
1657 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1658 get_alias_set (lhs
),
1659 TREE_TYPE (lhs
), lhs_ops
);
1661 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1663 *disambiguate_only
= true;
1669 ao_ref_init (&lhs_ref
, lhs
);
1673 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1674 && gimple_call_num_args (def_stmt
) <= 4)
1676 /* For builtin calls valueize its arguments and call the
1677 alias oracle again. Valueization may improve points-to
1678 info of pointers and constify size and position arguments.
1679 Originally this was motivated by PR61034 which has
1680 conditional calls to free falsely clobbering ref because
1681 of imprecise points-to info of the argument. */
1683 bool valueized_anything
= false;
1684 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1686 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1687 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1688 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1690 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1691 valueized_anything
= true;
1694 if (valueized_anything
)
1696 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1698 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1699 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1702 *disambiguate_only
= true;
1708 if (*disambiguate_only
)
1711 offset
= ref
->offset
;
1712 maxsize
= ref
->max_size
;
1714 /* If we cannot constrain the size of the reference we cannot
1715 test if anything kills it. */
1719 /* We can't deduce anything useful from clobbers. */
1720 if (gimple_clobber_p (def_stmt
))
1723 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1724 from that definition.
1726 if (is_gimple_reg_type (vr
->type
)
1727 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1728 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1729 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1730 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1732 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1734 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1736 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
1738 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1739 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1740 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1742 && operand_equal_p (base
, base2
, 0)
1743 && offset2
<= offset
1744 && offset2
+ size2
>= offset
+ maxsize
)
1746 tree val
= build_zero_cst (vr
->type
);
1747 return vn_reference_lookup_or_insert_for_pieces
1748 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1752 /* 2) Assignment from an empty CONSTRUCTOR. */
1753 else if (is_gimple_reg_type (vr
->type
)
1754 && gimple_assign_single_p (def_stmt
)
1755 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1756 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1759 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1761 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1762 &offset2
, &size2
, &maxsize2
, &reverse
);
1764 && operand_equal_p (base
, base2
, 0)
1765 && offset2
<= offset
1766 && offset2
+ size2
>= offset
+ maxsize
)
1768 tree val
= build_zero_cst (vr
->type
);
1769 return vn_reference_lookup_or_insert_for_pieces
1770 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1774 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1775 routines to extract the assigned bits. */
1776 else if (vn_walk_kind
== VN_WALKREWRITE
1777 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1778 && ref
->size
== maxsize
1779 && maxsize
% BITS_PER_UNIT
== 0
1780 && offset
% BITS_PER_UNIT
== 0
1781 && is_gimple_reg_type (vr
->type
)
1782 && !contains_storage_order_barrier_p (vr
->operands
)
1783 && gimple_assign_single_p (def_stmt
)
1784 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
1787 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1789 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1790 &offset2
, &size2
, &maxsize2
, &reverse
);
1793 && maxsize2
== size2
1794 && size2
% BITS_PER_UNIT
== 0
1795 && offset2
% BITS_PER_UNIT
== 0
1796 && operand_equal_p (base
, base2
, 0)
1797 && offset2
<= offset
1798 && offset2
+ size2
>= offset
+ maxsize
)
1800 /* We support up to 512-bit values (for V8DFmode). */
1801 unsigned char buffer
[64];
1804 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1805 buffer
, sizeof (buffer
));
1808 tree val
= native_interpret_expr (vr
->type
,
1810 + ((offset
- offset2
)
1812 ref
->size
/ BITS_PER_UNIT
);
1814 return vn_reference_lookup_or_insert_for_pieces
1815 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1820 /* 4) Assignment from an SSA name which definition we may be able
1821 to access pieces from. */
1822 else if (ref
->size
== maxsize
1823 && is_gimple_reg_type (vr
->type
)
1824 && !contains_storage_order_barrier_p (vr
->operands
)
1825 && gimple_assign_single_p (def_stmt
)
1826 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1828 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
1829 gimple
*def_stmt2
= SSA_NAME_DEF_STMT (rhs1
);
1830 if (is_gimple_assign (def_stmt2
)
1831 && (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
1832 || gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
)
1833 && types_compatible_p (vr
->type
, TREE_TYPE (TREE_TYPE (rhs1
))))
1836 HOST_WIDE_INT offset2
, size2
, maxsize2
, off
;
1838 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1839 &offset2
, &size2
, &maxsize2
,
1841 off
= offset
- offset2
;
1844 && maxsize2
== size2
1845 && operand_equal_p (base
, base2
, 0)
1846 && offset2
<= offset
1847 && offset2
+ size2
>= offset
+ maxsize
)
1849 tree val
= NULL_TREE
;
1851 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1
))));
1852 if (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
)
1855 val
= gimple_assign_rhs1 (def_stmt2
);
1856 else if (off
== elsz
)
1857 val
= gimple_assign_rhs2 (def_stmt2
);
1859 else if (gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
1862 tree ctor
= gimple_assign_rhs1 (def_stmt2
);
1863 unsigned i
= off
/ elsz
;
1864 if (i
< CONSTRUCTOR_NELTS (ctor
))
1866 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, i
);
1867 if (TREE_CODE (TREE_TYPE (rhs1
)) == VECTOR_TYPE
)
1869 if (TREE_CODE (TREE_TYPE (elt
->value
))
1876 return vn_reference_lookup_or_insert_for_pieces
1877 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1882 /* 5) For aggregate copies translate the reference through them if
1883 the copy kills ref. */
1884 else if (vn_walk_kind
== VN_WALKREWRITE
1885 && gimple_assign_single_p (def_stmt
)
1886 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1887 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1888 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1891 HOST_WIDE_INT maxsize2
;
1893 auto_vec
<vn_reference_op_s
> rhs
;
1894 vn_reference_op_t vro
;
1900 /* See if the assignment kills REF. */
1901 base2
= ao_ref_base (&lhs_ref
);
1902 maxsize2
= lhs_ref
.max_size
;
1905 && (TREE_CODE (base
) != MEM_REF
1906 || TREE_CODE (base2
) != MEM_REF
1907 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
1908 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
1909 TREE_OPERAND (base2
, 1))))
1910 || !stmt_kills_ref_p (def_stmt
, ref
))
1913 /* Find the common base of ref and the lhs. lhs_ops already
1914 contains valueized operands for the lhs. */
1915 i
= vr
->operands
.length () - 1;
1916 j
= lhs_ops
.length () - 1;
1917 while (j
>= 0 && i
>= 0
1918 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
1924 /* ??? The innermost op should always be a MEM_REF and we already
1925 checked that the assignment to the lhs kills vr. Thus for
1926 aggregate copies using char[] types the vn_reference_op_eq
1927 may fail when comparing types for compatibility. But we really
1928 don't care here - further lookups with the rewritten operands
1929 will simply fail if we messed up types too badly. */
1930 HOST_WIDE_INT extra_off
= 0;
1931 if (j
== 0 && i
>= 0
1932 && lhs_ops
[0].opcode
== MEM_REF
1933 && lhs_ops
[0].off
!= -1)
1935 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
1937 else if (vr
->operands
[i
].opcode
== MEM_REF
1938 && vr
->operands
[i
].off
!= -1)
1940 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
1945 /* i now points to the first additional op.
1946 ??? LHS may not be completely contained in VR, one or more
1947 VIEW_CONVERT_EXPRs could be in its way. We could at least
1948 try handling outermost VIEW_CONVERT_EXPRs. */
1952 /* Punt if the additional ops contain a storage order barrier. */
1953 for (k
= i
; k
>= 0; k
--)
1955 vro
= &vr
->operands
[k
];
1956 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
1960 /* Now re-write REF to be based on the rhs of the assignment. */
1961 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1963 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1966 if (rhs
.length () < 2
1967 || rhs
[0].opcode
!= MEM_REF
1968 || rhs
[0].off
== -1)
1970 rhs
[0].off
+= extra_off
;
1971 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
1972 build_int_cst (TREE_TYPE (rhs
[0].op0
),
1976 /* We need to pre-pend vr->operands[0..i] to rhs. */
1977 vec
<vn_reference_op_s
> old
= vr
->operands
;
1978 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
1980 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
1981 if (old
== shared_lookup_references
)
1982 shared_lookup_references
= vr
->operands
;
1985 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
1986 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
1987 vr
->operands
[i
+ 1 + j
] = *vro
;
1988 vr
->operands
= valueize_refs (vr
->operands
);
1989 if (old
== shared_lookup_references
)
1990 shared_lookup_references
= vr
->operands
;
1991 vr
->hashcode
= vn_reference_compute_hash (vr
);
1993 /* Try folding the new reference to a constant. */
1994 tree val
= fully_constant_vn_reference_p (vr
);
1996 return vn_reference_lookup_or_insert_for_pieces
1997 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1999 /* Adjust *ref from the new operands. */
2000 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2002 /* This can happen with bitfields. */
2003 if (ref
->size
!= r
.size
)
2007 /* Do not update last seen VUSE after translating. */
2008 last_vuse_ptr
= NULL
;
2010 /* Keep looking for the adjusted *REF / VR pair. */
2014 /* 6) For memcpy copies translate the reference through them if
2015 the copy kills ref. */
2016 else if (vn_walk_kind
== VN_WALKREWRITE
2017 && is_gimple_reg_type (vr
->type
)
2018 /* ??? Handle BCOPY as well. */
2019 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
2020 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
2021 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
2022 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2023 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
2024 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
2025 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
2026 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
2030 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
2031 vn_reference_op_s op
;
2034 /* Only handle non-variable, addressable refs. */
2035 if (ref
->size
!= maxsize
2036 || offset
% BITS_PER_UNIT
!= 0
2037 || ref
->size
% BITS_PER_UNIT
!= 0)
2040 /* Extract a pointer base and an offset for the destination. */
2041 lhs
= gimple_call_arg (def_stmt
, 0);
2043 if (TREE_CODE (lhs
) == SSA_NAME
)
2045 lhs
= SSA_VAL (lhs
);
2046 if (TREE_CODE (lhs
) == SSA_NAME
)
2048 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
2049 if (gimple_assign_single_p (def_stmt
)
2050 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2051 lhs
= gimple_assign_rhs1 (def_stmt
);
2054 if (TREE_CODE (lhs
) == ADDR_EXPR
)
2056 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
2060 if (TREE_CODE (tem
) == MEM_REF
2061 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2063 lhs
= TREE_OPERAND (tem
, 0);
2064 if (TREE_CODE (lhs
) == SSA_NAME
)
2065 lhs
= SSA_VAL (lhs
);
2066 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2068 else if (DECL_P (tem
))
2069 lhs
= build_fold_addr_expr (tem
);
2073 if (TREE_CODE (lhs
) != SSA_NAME
2074 && TREE_CODE (lhs
) != ADDR_EXPR
)
2077 /* Extract a pointer base and an offset for the source. */
2078 rhs
= gimple_call_arg (def_stmt
, 1);
2080 if (TREE_CODE (rhs
) == SSA_NAME
)
2081 rhs
= SSA_VAL (rhs
);
2082 if (TREE_CODE (rhs
) == ADDR_EXPR
)
2084 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
2088 if (TREE_CODE (tem
) == MEM_REF
2089 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2091 rhs
= TREE_OPERAND (tem
, 0);
2092 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2094 else if (DECL_P (tem
))
2095 rhs
= build_fold_addr_expr (tem
);
2099 if (TREE_CODE (rhs
) != SSA_NAME
2100 && TREE_CODE (rhs
) != ADDR_EXPR
)
2103 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
2105 /* The bases of the destination and the reference have to agree. */
2106 if ((TREE_CODE (base
) != MEM_REF
2108 || (TREE_CODE (base
) == MEM_REF
2109 && (TREE_OPERAND (base
, 0) != lhs
2110 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
2112 && (TREE_CODE (lhs
) != ADDR_EXPR
2113 || TREE_OPERAND (lhs
, 0) != base
)))
2116 at
= offset
/ BITS_PER_UNIT
;
2117 if (TREE_CODE (base
) == MEM_REF
)
2118 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
2119 /* If the access is completely outside of the memcpy destination
2120 area there is no aliasing. */
2121 if (lhs_offset
>= at
+ maxsize
/ BITS_PER_UNIT
2122 || lhs_offset
+ copy_size
<= at
)
2124 /* And the access has to be contained within the memcpy destination. */
2126 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
2129 /* Make room for 2 operands in the new reference. */
2130 if (vr
->operands
.length () < 2)
2132 vec
<vn_reference_op_s
> old
= vr
->operands
;
2133 vr
->operands
.safe_grow_cleared (2);
2134 if (old
== shared_lookup_references
2135 && vr
->operands
!= old
)
2136 shared_lookup_references
= vr
->operands
;
2139 vr
->operands
.truncate (2);
2141 /* The looked-through reference is a simple MEM_REF. */
2142 memset (&op
, 0, sizeof (op
));
2144 op
.opcode
= MEM_REF
;
2145 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2146 op
.off
= at
- lhs_offset
+ rhs_offset
;
2147 vr
->operands
[0] = op
;
2148 op
.type
= TREE_TYPE (rhs
);
2149 op
.opcode
= TREE_CODE (rhs
);
2152 vr
->operands
[1] = op
;
2153 vr
->hashcode
= vn_reference_compute_hash (vr
);
2155 /* Try folding the new reference to a constant. */
2156 tree val
= fully_constant_vn_reference_p (vr
);
2158 return vn_reference_lookup_or_insert_for_pieces
2159 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2161 /* Adjust *ref from the new operands. */
2162 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2164 /* This can happen with bitfields. */
2165 if (ref
->size
!= r
.size
)
2169 /* Do not update last seen VUSE after translating. */
2170 last_vuse_ptr
= NULL
;
2172 /* Keep looking for the adjusted *REF / VR pair. */
2176 /* Bail out and stop walking. */
2180 /* Lookup a reference operation by its parts, in the current hash table.
2181 Returns the resulting value number if it exists in the hash table,
2182 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2183 vn_reference_t stored in the hashtable if something is found. */
2186 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2187 vec
<vn_reference_op_s
> operands
,
2188 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2190 struct vn_reference_s vr1
;
2198 vr1
.vuse
= vuse_ssa_val (vuse
);
2199 shared_lookup_references
.truncate (0);
2200 shared_lookup_references
.safe_grow (operands
.length ());
2201 memcpy (shared_lookup_references
.address (),
2202 operands
.address (),
2203 sizeof (vn_reference_op_s
)
2204 * operands
.length ());
2205 vr1
.operands
= operands
= shared_lookup_references
2206 = valueize_refs (shared_lookup_references
);
2209 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2210 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2213 vn_reference_lookup_1 (&vr1
, vnresult
);
2215 && kind
!= VN_NOWALK
2219 vn_walk_kind
= kind
;
2220 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2222 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2223 vn_reference_lookup_2
,
2224 vn_reference_lookup_3
,
2225 vuse_ssa_val
, &vr1
);
2226 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2230 return (*vnresult
)->result
;
2235 /* Lookup OP in the current hash table, and return the resulting value
2236 number if it exists in the hash table. Return NULL_TREE if it does
2237 not exist in the hash table or if the result field of the structure
2238 was NULL.  VNRESULT will be filled in with the vn_reference_t
2239 stored in the hashtable if one exists. When TBAA_P is false assume
2240 we are looking up a store and treat it as having alias-set zero. */
2243 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2244 vn_reference_t
*vnresult
, bool tbaa_p
)
2246 vec
<vn_reference_op_s
> operands
;
2247 struct vn_reference_s vr1
;
2249 bool valuezied_anything
;
2254 vr1
.vuse
= vuse_ssa_val (vuse
);
2255 vr1
.operands
= operands
2256 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2257 vr1
.type
= TREE_TYPE (op
);
2258 vr1
.set
= tbaa_p
? get_alias_set (op
) : 0;
2259 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2260 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2263 if (kind
!= VN_NOWALK
2266 vn_reference_t wvnresult
;
2268 /* Make sure to use a valueized reference if we valueized anything.
2269 Otherwise preserve the full reference for advanced TBAA. */
2270 if (!valuezied_anything
2271 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2273 ao_ref_init (&r
, op
);
2275 r
.ref_alias_set
= r
.base_alias_set
= 0;
2276 vn_walk_kind
= kind
;
2278 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2279 vn_reference_lookup_2
,
2280 vn_reference_lookup_3
,
2281 vuse_ssa_val
, &vr1
);
2282 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2286 *vnresult
= wvnresult
;
2287 return wvnresult
->result
;
2293 return vn_reference_lookup_1 (&vr1
, vnresult
);
2296 /* Lookup CALL in the current hash table and return the entry in
2297 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2300 vn_reference_lookup_call (gcall
*call
, vn_reference_t
*vnresult
,
2306 tree vuse
= gimple_vuse (call
);
2308 vr
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2309 vr
->operands
= valueize_shared_reference_ops_from_call (call
);
2310 vr
->type
= gimple_expr_type (call
);
2312 vr
->hashcode
= vn_reference_compute_hash (vr
);
2313 vn_reference_lookup_1 (vr
, vnresult
);
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
  vn_reference_s **slot;

  vr1 = current_info->references_pool->allocate ();
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)

  /* But free the old slot in case of a collision.  */
    free_reference (*slot);
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            tree result, unsigned int value_id)
  vn_reference_s **slot;

  vr1 = current_info->references_pool->allocate ();
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
  gcc_assert (!*slot);
    free_reference (*slot);
/* Compute and return the hash value for nary operation VNO1.  */

vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  inchash::hash hstate;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
        && commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
           && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
           && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
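  /* Illustrative note (added, not part of the original sources): the
     swapping above canonicalizes operand order before hashing, so e.g.
     b_2 + a_1 and a_1 + b_2 hash identically, and a comparison such as
     a_1 < b_2 is rewritten to the equivalent b_2 > a_1 when its operands
     are swapped, keeping hash and equality consistent for commutated
     forms.  */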
  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
/* Compare nary operations VNO1 and VNO2 and return true if they are

vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
  if (vno1->hashcode != vno2->hashcode)

  if (vno1->length != vno2->length)

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))

/* Initialize VNO from the pieces provided.  */

init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
  vno->length = length;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);

/* Initialize VNO from OP.  */

init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);

/* Return the number of operands for a vn_nary ops structure from STMT.  */

vn_nary_length_from_stmt (gimple *stmt)
  switch (gimple_assign_rhs_code (stmt))
    case VIEW_CONVERT_EXPR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      return gimple_num_ops (stmt) - 1;
/* Initialize VNO from STMT.  */

init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    case VIEW_CONVERT_EXPR:
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);

      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);

      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;

      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = gimple_op (stmt, i + 1);

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
  vn_nary_op_s **slot;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,

  return (*slot)->result;
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable

vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops, vn_nary_op_t *vnresult)
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
                                  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable

vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);

/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
  if (!rcode.is_tree_code ())
  vn_nary_op_t vnresult = NULL;
  return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
                                   (tree_code) rcode, type, ops, &vnresult);
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's

alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
  vn_nary_op_s **slot;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in

vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in

vn_nary_op_insert (tree op, tree result)
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);

/* Insert the rhs of STMT into the current hash table with a value number of

vn_nary_op_insert_stmt (gimple *stmt, tree result)
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
  inchash::hash hstate (vp1->phiargs.length () > 2
                        ? vp1->block->index : vp1->phiargs.length ());

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
      /* Don't hash backedge values; they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)

      inchash::add_expr (phi1op, hstate);

  return hstate.end ();
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as

cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);
  tree lhs1 = gimple_cond_lhs (cond1);
  tree lhs2 = gimple_cond_lhs (cond2);
  tree rhs1 = gimple_cond_rhs (cond1);
  tree rhs2 = gimple_cond_rhs (cond2);

  *inverted_p = false;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
  else if (code1 == invert_tree_comparison
             (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
      std::swap (lhs2, rhs2);

  lhs1 = vn_valueize (lhs1);
  rhs1 = vn_valueize (rhs1);
  lhs2 = vn_valueize (lhs2);
  rhs2 = vn_valueize (rhs2);
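  /* Illustrative note (added, not from the original sources): with the
     canonicalization above, a condition like a_1 < b_2 matches b_2 > a_1
     directly, and matches a_1 >= b_2 or b_2 <= a_1 with *INVERTED_P set,
     since those cases are normalized via swap_tree_comparison and
     invert_tree_comparison before the operands are valueized.  */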
  return ((expressions_equal_p (lhs1, lhs2)
           && expressions_equal_p (rhs1, rhs2))
          || (commutative_tree_code (code1)
              && expressions_equal_p (lhs1, rhs2)
              && expressions_equal_p (rhs1, lhs2)));
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
  if (vp1->hashcode != vp2->hashcode)

  if (vp1->block != vp2->block)

  if (vp1->phiargs.length () != vp2->phiargs.length ())

  switch (vp1->phiargs.length ())
      /* Single-arg PHIs are just copies.  */

      /* Rule out backedges into the PHI.  */
      if (vp1->block->loop_father->header == vp1->block
          || vp2->block->loop_father->header == vp2->block)

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))

        = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
        = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
      /* If the immediate dominators end in switch stmts multiple
         values may end up in the same PHI arg via intermediate
      if (EDGE_COUNT (idom1->succs) != 2
          || EDGE_COUNT (idom2->succs) != 2)

      /* Verify the controlling stmt is the same.  */
      gimple *last1 = last_stmt (idom1);
      gimple *last2 = last_stmt (idom2);
      if (gimple_code (last1) != GIMPLE_COND
          || gimple_code (last2) != GIMPLE_COND)

      if (! cond_stmts_equal_p (as_a <gcond *> (last1),
                                as_a <gcond *> (last2), &inverted_p))

      /* Get at true/false controlled edges into the PHI.  */
      edge te1, te2, fe1, fe2;
      if (! extract_true_false_controlled_edges (idom1, vp1->block,
          || ! extract_true_false_controlled_edges (idom2, vp2->block,

      /* Swap edges if the second condition is the inverted of the
        std::swap (te2, fe2);

      /* ??? Handle VN_TOP specially.  */
      if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
                                 vp2->phiargs[te2->dest_idx])
          || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
                                    vp2->phiargs[fe2->dest_idx]))

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
          tree phi2op = vp2->phiargs[i];
          if (phi1op == VN_TOP || phi2op == VN_TOP)

          if (!expressions_equal_p (phi1op, phi2op))
static vec<tree> shared_lookup_phiargs;
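/* Note (added for clarity, not in the original sources): this is a scratch
   vector shared by all PHI lookups; vn_phi_lookup below truncates and
   regrows it for every query instead of allocating a fresh argument vector
   each time.  */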
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_phi_lookup (gimple *phi)
  struct vn_phi_s vp1;

  shared_lookup_phiargs.truncate (0);
  shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs[e->dest_idx] = def;

  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,

  return (*slot)->result;
/* Insert PHI into the current hash table with a value number of

vn_phi_insert (gimple *phi, tree result)
  vn_phi_t vp1 = current_info->phis_pool->allocate ();
  vec<tree> args = vNULL;

  args.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args[e->dest_idx] = def;

  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
/* Print set of components in strongly connected component SCC to OUT.  */

print_scc (FILE *out, vec<tree> scc)
  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
      print_generic_expr (out, var, 0);
  fprintf (out, "\n");
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))

  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all; if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if (e->flags & EDGE_EXECUTABLE)

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))

  /* Iterate to the single executable bb2 successor.  */
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)

      /* Verify the reached block is only reached through succe.
         If there is only one edge we can spare us the dominator
         check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
          FOR_EACH_EDGE (e, ei, succe->dest->preds)
              && (e->flags & EDGE_EXECUTABLE))

      /* Re-do the dominance check with changed bb2.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))

  /* We could now iterate updating bb1 / bb2.  */
/* Set the value number of FROM to TO, return true if it has changed

set_ssa_val_to (tree from, tree to)
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");

  gcc_assert (to != NULL_TREE
              && ((TREE_CODE (to) == SSA_NAME
                   && (to == from || SSA_VAL (to) == to))
                  || is_gimple_min_invariant (to)));

  if (currval == from)
      if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Not changing value number of ");
          print_generic_expr (dump_file, from, 0);
          fprintf (dump_file, " from VARYING to ");
          print_generic_expr (dump_file, to, 0);
          fprintf (dump_file, "\n");
  else if (TREE_CODE (to) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);

      && !operand_equal_p (currval, to, 0)
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))

      /* If we equate two SSA names we have to make the side-band info
         of the leader conservative (and remember whatever original value
      if (TREE_CODE (to) == SSA_NAME)
          if (INTEGRAL_TYPE_P (TREE_TYPE (to))
              && SSA_NAME_RANGE_INFO (to))
              if (SSA_NAME_IS_DEFAULT_DEF (to)
                  || dominated_by_p_w_unex
                       (gimple_bb (SSA_NAME_DEF_STMT (from)),
                        gimple_bb (SSA_NAME_DEF_STMT (to))))
                /* Keep the info from the dominator.  */
              else if (SSA_NAME_IS_DEFAULT_DEF (from)
                       || dominated_by_p_w_unex
                            (gimple_bb (SSA_NAME_DEF_STMT (to)),
                             gimple_bb (SSA_NAME_DEF_STMT (from))))
                  /* Save old info.  */
                  if (! VN_INFO (to)->info.range_info)
                      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
                      VN_INFO (to)->range_info_anti_range_p
                        = SSA_NAME_ANTI_RANGE_P (to);
                  /* Use that from the dominator.  */
                  SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
                  SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);

                  /* Save old info.  */
                  if (! VN_INFO (to)->info.range_info)
                      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
                      VN_INFO (to)->range_info_anti_range_p
                        = SSA_NAME_ANTI_RANGE_P (to);
                  /* Rather than allocating memory and unioning the info
                  SSA_NAME_RANGE_INFO (to) = NULL;
          else if (POINTER_TYPE_P (TREE_TYPE (to))
                   && SSA_NAME_PTR_INFO (to))
              if (SSA_NAME_IS_DEFAULT_DEF (to)
                  || dominated_by_p_w_unex
                       (gimple_bb (SSA_NAME_DEF_STMT (from)),
                        gimple_bb (SSA_NAME_DEF_STMT (to))))
                /* Keep the info from the dominator.  */
              else if (SSA_NAME_IS_DEFAULT_DEF (from)
                       || dominated_by_p_w_unex
                            (gimple_bb (SSA_NAME_DEF_STMT (to)),
                             gimple_bb (SSA_NAME_DEF_STMT (from))))
                  /* Save old info.  */
                  if (! VN_INFO (to)->info.ptr_info)
                    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
                  /* Use that from the dominator.  */
                  SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
              else if (! SSA_NAME_PTR_INFO (from)
                       /* Handle the case of trivially equivalent info.  */
                       || memcmp (SSA_NAME_PTR_INFO (to),
                                  SSA_NAME_PTR_INFO (from),
                                  sizeof (ptr_info_def)) != 0)
                  /* Save old info.  */
                  if (! VN_INFO (to)->info.ptr_info)
                    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
                  /* Rather than allocating memory and unioning the info
                  SSA_NAME_PTR_INFO (to) = NULL;

      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
/* Mark as processed all the definitions in the defining stmt of USE, or

mark_use_processed (tree use)
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
      VN_INFO (use)->use_processed = true;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
      tree def = DEF_FROM_PTR (defp);
      VN_INFO (def)->use_processed = true;

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

defs_to_varying (gimple *stmt)
  bool changed = false;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);

/* Visit a copy between LHS and RHS, return true if the value number

visit_copy (tree lhs, tree rhs)
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

visit_nary_op (tree lhs, gimple *stmt)
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

    changed = set_ssa_val_to (lhs, result);

      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

visit_reference_op_call (tree lhs, gcall *stmt)
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)

  vn_reference_lookup_call (stmt, &vnresult, &vr1);

      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);

      vn_reference_s **slot;
        changed |= set_ssa_val_to (vdef, vdef);
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->hashcode = vr1.hashcode;
      vr2->result_vdef = vdef;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
      gcc_assert (!*slot);
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_load (tree lhs, tree op, gimple *stmt)
  bool changed = false;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      mprts_hook = vn_lookup_simplify_result;
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,

      gimple *new_stmt = NULL;
          && gimple_simplified_result_is_gimple_val (rcode, ops))
        /* The expression is already available.  */
          tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
              gimple_seq stmts = NULL;
              result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
                  gcc_assert (gimple_seq_singleton_p (stmts));
                  new_stmt = gimple_seq_first_stmt (stmts);
            /* The expression is already available.  */

          /* The expression is not yet available, value-number lhs to
             the new SSA_NAME we created.  */
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
              current_info = valid_info;
              vn_nary_op_insert_stmt (new_stmt, result);
              current_info = optimistic_info;
            vn_nary_op_insert_stmt (new_stmt, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
              fprintf (dump_file, "\n");

    changed = set_ssa_val_to (lhs, result);

      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_store (tree lhs, tree op, gimple *stmt)
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
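  /* Illustrative example (added, not part of the original comment): if two
     stores *p_1 = x_2 with the same address p_1 and the same stored value
     x_2 are seen with the same incoming VUSE, the second store does not
     change the memory state, so its VDEF can be value numbered to its VUSE
     below instead of starting a new memory version.  */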
  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);

      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);

  if ((!result || !resultsame)
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      && default_vn_walk_kind == VN_WALK)
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          VN_INFO (vdef)->use_processed = true;
          return set_ssa_val_to (vdef, vnresult->result_vdef);

  if (!result || !resultsame)
      if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");

      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
          changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);

      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
/* Visit and value number PHI, return true if the value number

visit_phi (gimple *phi)
  bool changed = false;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned n_executable = 0;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        if (TREE_CODE (def) == SSA_NAME)
          def = SSA_VAL (def);
        if (sameval == VN_TOP)
        else if (!expressions_equal_p (def, sameval))

  /* If none of the edges was executable or all incoming values are
     undefined keep the value-number at VN_TOP.  If only a single edge
     is executable use its value.  */
  if (sameval == VN_TOP
      || n_executable == 1)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  result = vn_phi_lookup (phi);
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  /* Otherwise all value numbered to the same value, the phi node has that
    changed = set_ssa_val_to (PHI_RESULT (phi), sameval);

      vn_phi_insert (phi, PHI_RESULT (phi));
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* Try to simplify RHS using equivalences and constant folding.  */

try_to_simplify (gassign *stmt)
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
/* Visit and value number USE, return true if the value number

visit_use (tree use)
  bool changed = false;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
          changed = visit_copy (lhs, rhs1);

      simplified = try_to_simplify (ass);
          if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified, 0);
              fprintf (dump_file, "\n");

      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
          changed = set_ssa_val_to (lhs, simplified);

          && TREE_CODE (simplified) == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
          changed = visit_copy (lhs, simplified);

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
               && is_gimple_min_invariant (simplified)))
                changed = set_ssa_val_to (lhs, simplified);
                changed = set_ssa_val_to (lhs, rhs1);

              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                  changed = visit_nary_op (lhs, ass);
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  changed = defs_to_varying (ass);
        changed = defs_to_varying (ass);
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
              if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  fprintf (dump_file, "\n");

          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
              && is_gimple_min_invariant (simplified))
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              && TREE_CODE (simplified) == SSA_NAME)
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              changed = defs_to_varying (call_stmt);

      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 to be the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct are encoded
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
        changed = defs_to_varying (call_stmt);
    changed = defs_to_varying (stmt);
/* Compare two operands by reverse postorder index.  */

compare_ops (const void *pa, const void *pb)
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple *opstmta = SSA_NAME_DEF_STMT (opa);
  gimple *opstmtb = SSA_NAME_DEF_STMT (opb);

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
  else if (gimple_nop_p (opstmtb))

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);
      return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);

      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);

  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

sort_scc (vec<tree> scc)
  scc.qsort (compare_ops);
/* Insert the no longer used nary ONARY to the hash INFO.  */

copy_nary (vn_nary_op_t onary, vn_tables_t info)
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);

/* Insert the no longer used phi OPHI to the hash INFO.  */

copy_phi (vn_phi_t ophi, vn_tables_t info)
  vn_phi_t phi = info->phis_pool->allocate ();

  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);

/* Insert the no longer used reference OREF to the hash INFO.  */

copy_reference (vn_reference_t oref, vn_tables_t info)
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
    free_reference (*slot);
/* Process a strongly connected component in the SSA graph.  */

process_scc (vec<tree> scc)
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
      if (VN_INFO (use)->use_processed)
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
  current_info = optimistic_info;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
        gcc_assert (!VN_INFO (var)->needs_insertion
                    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
        changed |= visit_use (var);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
                               ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

extract_and_process_scc_for_name (tree name)
  /* Found an SCC, pop the components off the SCC stack and
      x = sccstack.pop ();
      VN_INFO (x)->on_sccstack = false;
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", scc.length (),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

  if (scc.length () > 1)
/* Depth first search on NAME to discover and process SCC's in the SSA
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */
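/* Note (added for orientation, not part of the original comment): the walk
   below is an iterative form of Tarjan's SCC algorithm.  Instead of
   recursing it keeps explicit NAMEVEC / ITERVEC stacks of the names and
   operand iterators being walked, and uses the dfsnum / low / on_sccstack
   fields of VN_INFO to detect when a complete SCC can be popped.  */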
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;

  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    clear_and_done_ssa_iter (&iter);

      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))

          /* Check if we are done.  */
          if (namevec.is_empty ())

          /* Restore the last use walker and continue walking there.  */
          name = namevec.pop ();
          memcpy (&iter, &itervec.last (),
                  sizeof (ssa_op_iter));
          goto continue_walking;

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
          if (! (VN_INFO (use)->visited))
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              itervec.safe_push (iter);
              namevec.safe_push (name);

              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);

      usep = op_iter_next_use (&iter);
/* Allocate a value number table.  */

allocate_vn_table (vn_tables_t table)
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>

/* Free a value number table.  */

free_vn_table (vn_tables_t table)
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp
    = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
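  /* Worked example (added for clarity): if rpo_numbers_temp is {4, 2, 7},
     i.e. block 4 comes first, block 2 second and block 7 third in RPO
     order, the loop above produces rpo_numbers[4] = 0, rpo_numbers[2] = 1
     and rpo_numbers[7] = 2, mapping each block index to its RPO position.  */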
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  current_info = valid_info;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  for (i = 1; i < num_ssa_names; i++)
      tree name = ssa_name (i);

      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      if (!SSA_NAME_IS_DEFAULT_DEF (name))

      switch (TREE_CODE (SSA_NAME_VAR (name)))
          /* Undefined vars keep TOP.  */

          /* Parameters are VARYING but we can record a condition
             if we know it is a non-NULL pointer.  */
          VN_INFO (name)->visited = true;
          VN_INFO (name)->valnum = name;
          if (POINTER_TYPE_P (TREE_TYPE (name))
              && nonnull_arg_p (SSA_NAME_VAR (name)))
              ops[1] = build_int_cst (TREE_TYPE (name), 0);
              vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
                                        boolean_true_node, 0);
              if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Recording ");
                  print_generic_expr (dump_file, name, TDF_SLIM);
                  fprintf (dump_file, " != 0\n");

          /* If the result is passed by invisible reference the default
             def is initialized, otherwise it's uninitialized.  */
          if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
              VN_INFO (name)->visited = true;
              VN_INFO (name)->valnum = name;
/* Restore SSA info that has been reset on value leaders.  */

scc_vn_restore_ssa_info (void)
  for (unsigned i = 0; i < num_ssa_names; i++)
      tree name = ssa_name (i);
          && has_VN_INFO (name))
          if (VN_INFO (name)->needs_insertion)
          else if (POINTER_TYPE_P (TREE_TYPE (name))
                   && VN_INFO (name)->info.ptr_info)
            SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
          else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
                   && VN_INFO (name)->info.range_info)
              SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
              SSA_NAME_ANTI_RANGE_P (name)
                = VN_INFO (name)->range_info_anti_range_p;
  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
      tree name = ssa_name (i);
          && has_VN_INFO (name)
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);

  BITMAP_FREE (const_parms);
/* Set *ID according to RESULT.  */

set_value_id_for_result (tree result, unsigned int *id)
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
    *id = get_next_value_id ();

/* Set the value ids in the valid hash tables.  */

set_hashtable_value_ids (void)
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;

  /* Now set the value ids of the things we had put in the hash
  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
    set_value_id_for_result (vr->result, &vr->value_id);
class sccvn_dom_walker : public dom_walker
    : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
  ~sccvn_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
                    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
                     enum tree_code code, tree lhs, tree rhs, bool value);

  vec<std::pair<basic_block, std::pair<vn_nary_op_t, vn_nary_op_t> > >

sccvn_dom_walker::~sccvn_dom_walker ()
  cond_stack.release ();

/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
                               enum tree_code code, tree lhs, tree rhs,
                               bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
                                value
                                ? boolean_true_node
                                : boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
               value ? "true" : "false",
               old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}

/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
                                enum tree_code code, tree lhs, tree rhs,
                                bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;
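
    /* Equality implies both non-strict orderings and rules out the
       strict ones.  */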
    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}

/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
         && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
        vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}

/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = NULL;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* Ignore simple backedges from this to allow recording conditions
         in loop headers.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;
      if (! (e->flags & EDGE_EXECUTABLE))
        continue;
      if (pred_e)
        {
          pred_e = NULL;
          break;
        }
      pred_e = e;
    }
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
         the source block.  Otherwise there is no additional info
         to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
        if (e2 != pred_e
            && e2->flags & EDGE_EXECUTABLE)
          break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
        {
          gimple *stmt = last_stmt (pred_e->src);
          if (stmt
              && gimple_code (stmt) == GIMPLE_COND)
            {
              enum tree_code code = gimple_cond_code (stmt);
              tree lhs = gimple_cond_lhs (stmt);
              tree rhs = gimple_cond_rhs (stmt);
              record_conds (bb, code, lhs, rhs,
                            (pred_e->flags & EDGE_TRUE_VALUE) != 0);
              code = invert_tree_comparison (code, HONOR_NANS (lhs));
              if (code != ERROR_MARK)
                record_conds (bb, code, lhs, rhs,
                              (pred_e->flags & EDGE_TRUE_VALUE) == 0);
            }
        }
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
          && !DFS (res))
        {
          fail = true;
          return NULL;
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
        if (!VN_INFO (op)->visited
            && !DFS (op))
          {
            fail = true;
            return NULL;
          }
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
        tree lhs = vn_valueize (gimple_cond_lhs (stmt));
        tree rhs = vn_valueize (gimple_cond_rhs (stmt));
        val = gimple_simplify (gimple_cond_code (stmt),
                               boolean_type_node, lhs, rhs,
                               NULL, vn_valueize);
        /* If that didn't simplify to a constant see if we have recorded
           temporary expressions from taken edges.  */
        if (!val || TREE_CODE (val) != INTEGER_CST)
          {
            tree ops[2];
            ops[0] = lhs;
            ops[1] = rhs;
            val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
                                            boolean_type_node, ops, NULL);
          }
        break;
      }
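    /* For switches and computed gotos the (valueized) index or
       destination is what selects the outgoing edge.  */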
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;
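
  /* If the value selects exactly one outgoing edge, the remaining edges
     can be treated as not executable by the walk.  */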
  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
             "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
                                  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      i = 1;
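      /* The first character of the fnspec string describes the return
         value, so argument number I corresponds to character I; 'R' and
         'r' indicate the argument only points to memory that is read.  */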
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
           arg; arg = DECL_CHAIN (arg), ++i)
        {
          if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
            break;
          if (TREE_STRING_POINTER (fnspec)[i] == 'R'
              || TREE_STRING_POINTER (fnspec)[i] == 'r')
            {
              tree name = ssa_default_def (cfun, arg);
              if (name)
                bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
            }
        }
    }

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
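
  /* The walker sets fail when value numbering ran into its resource
     limits; in that case undo everything and report failure.  */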
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (!info->visited)
        info->valnum = name;
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
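
  /* Propagate value ids along the valnum chains so each name ends up
     with the value id of its value leader.  */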
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;
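
  /* Even when the operation itself cannot trap, one of its operands
     still might (for example a potentially trapping memory reference).  */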
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))