/* SCC value numbering for trees
   Copyright (C) 2006-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "alloc-pool.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it stabilizes).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
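/* A minimal sketch (not part of GCC itself) of the driver the comment
   above describes: a Tarjan-style DFS over SSA use-def edges, where a
   popped multi-member SCC is iterated with the optimistic tables until
   its value numbers stop changing.  The names DFS and visit_use and the
   table switching below are placeholders for the real routines later
   in this file.

     DFS (name):
       give NAME a DFS number and push it on sccstack;
       for each SSA operand OP of NAME's defining statement:
         if OP has not been visited: DFS (OP);
       if NAME is the root of an SCC:
         pop the SCC off sccstack;
         if it has a single member:
           visit it once using the valid tables;
         else:
           switch to the optimistic tables;
           do
             changed = false;
             for each member M (in RPO order):
               changed |= visit_use (M);
           while (changed);
           copy the stabilized entries into the valid tables.  */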
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type nary;
  vn_phi_table_type phis;
  vn_reference_table_type references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
400 /* Get the representative expression for the SSA_NAME NAME. Returns
401 the representative SSA_NAME if there is no expression associated with it. */
404 vn_get_expr_for (tree name
)
406 vn_ssa_aux_t vn
= VN_INFO (name
);
408 tree expr
= NULL_TREE
;
411 if (vn
->valnum
== VN_TOP
)
414 /* If the value-number is a constant it is the representative
416 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
419 /* Get to the information of the value of this SSA_NAME. */
420 vn
= VN_INFO (vn
->valnum
);
422 /* If the value-number is a constant it is the representative
424 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
427 /* Else if we have an expression, return it. */
428 if (vn
->expr
!= NULL_TREE
)
431 /* Otherwise use the defining statement to build the expression. */
432 def_stmt
= SSA_NAME_DEF_STMT (vn
->valnum
);
434 /* If the value number is not an assignment use it directly. */
435 if (!is_gimple_assign (def_stmt
))
438 /* Note that we can valueize here because we clear the cached
439 simplified expressions after each optimistic iteration. */
440 code
= gimple_assign_rhs_code (def_stmt
);
441 switch (TREE_CODE_CLASS (code
))
444 if ((code
== REALPART_EXPR
445 || code
== IMAGPART_EXPR
446 || code
== VIEW_CONVERT_EXPR
)
447 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt
),
449 expr
= fold_build1 (code
,
450 gimple_expr_type (def_stmt
),
451 vn_valueize (TREE_OPERAND
452 (gimple_assign_rhs1 (def_stmt
), 0)));
456 expr
= fold_build1 (code
,
457 gimple_expr_type (def_stmt
),
458 vn_valueize (gimple_assign_rhs1 (def_stmt
)));
462 expr
= fold_build2 (code
,
463 gimple_expr_type (def_stmt
),
464 vn_valueize (gimple_assign_rhs1 (def_stmt
)),
465 vn_valueize (gimple_assign_rhs2 (def_stmt
)));
468 case tcc_exceptional
:
469 if (code
== CONSTRUCTOR
471 (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))) == VECTOR_TYPE
)
472 expr
= gimple_assign_rhs1 (def_stmt
);
477 if (expr
== NULL_TREE
)
480 /* Cache the expression. */
486 /* Return the vn_kind the expression computed by the stmt should be
490 vn_get_stmt_kind (gimple stmt
)
492 switch (gimple_code (stmt
))
500 enum tree_code code
= gimple_assign_rhs_code (stmt
);
501 tree rhs1
= gimple_assign_rhs1 (stmt
);
502 switch (get_gimple_rhs_class (code
))
504 case GIMPLE_UNARY_RHS
:
505 case GIMPLE_BINARY_RHS
:
506 case GIMPLE_TERNARY_RHS
:
508 case GIMPLE_SINGLE_RHS
:
509 switch (TREE_CODE_CLASS (code
))
512 /* VOP-less references can go through unary case. */
513 if ((code
== REALPART_EXPR
514 || code
== IMAGPART_EXPR
515 || code
== VIEW_CONVERT_EXPR
516 || code
== BIT_FIELD_REF
)
517 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
521 case tcc_declaration
:
528 if (code
== ADDR_EXPR
)
529 return (is_gimple_min_invariant (rhs1
)
530 ? VN_CONSTANT
: VN_REFERENCE
);
531 else if (code
== CONSTRUCTOR
)
544 /* Lookup a value id for CONSTANT and return it. If it does not
548 get_constant_value_id (tree constant
)
550 vn_constant_s
**slot
;
551 struct vn_constant_s vc
;
553 vc
.hashcode
= vn_hash_constant_with_type (constant
);
554 vc
.constant
= constant
;
555 slot
= constant_to_value_id
.find_slot_with_hash (&vc
, vc
.hashcode
, NO_INSERT
);
557 return (*slot
)->value_id
;
561 /* Lookup a value id for CONSTANT, and if it does not exist, create a
562 new one and return it. If it does exist, return it. */
565 get_or_alloc_constant_value_id (tree constant
)
567 vn_constant_s
**slot
;
568 struct vn_constant_s vc
;
571 vc
.hashcode
= vn_hash_constant_with_type (constant
);
572 vc
.constant
= constant
;
573 slot
= constant_to_value_id
.find_slot_with_hash (&vc
, vc
.hashcode
, INSERT
);
575 return (*slot
)->value_id
;
577 vcp
= XNEW (struct vn_constant_s
);
578 vcp
->hashcode
= vc
.hashcode
;
579 vcp
->constant
= constant
;
580 vcp
->value_id
= get_next_value_id ();
582 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
583 return vcp
->value_id
;
586 /* Return true if V is a value id for a constant. */
589 value_id_constant_p (unsigned int v
)
591 return bitmap_bit_p (constant_value_ids
, v
);
594 /* Compute the hash for a reference operand VRO1. */
597 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, hashval_t result
)
599 result
= iterative_hash_hashval_t (vro1
->opcode
, result
);
601 result
= iterative_hash_expr (vro1
->op0
, result
);
603 result
= iterative_hash_expr (vro1
->op1
, result
);
605 result
= iterative_hash_expr (vro1
->op2
, result
);
609 /* Compute a hash for the reference operation VR1 and return it. */
612 vn_reference_compute_hash (const vn_reference_t vr1
)
614 hashval_t result
= 0;
616 vn_reference_op_t vro
;
617 HOST_WIDE_INT off
= -1;
620 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
622 if (vro
->opcode
== MEM_REF
)
624 else if (vro
->opcode
!= ADDR_EXPR
)
636 result
= iterative_hash_hashval_t (off
, result
);
639 && vro
->opcode
== ADDR_EXPR
)
643 tree op
= TREE_OPERAND (vro
->op0
, 0);
644 result
= iterative_hash_hashval_t (TREE_CODE (op
), result
);
645 result
= iterative_hash_expr (op
, result
);
649 result
= vn_reference_op_compute_hash (vro
, result
);
653 result
+= SSA_NAME_VERSION (vr1
->vuse
);
658 /* Return true if reference operations VR1 and VR2 are equivalent. This
659 means they have the same set of operands and vuses. */
662 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
666 /* Early out if this is not a hash collision. */
667 if (vr1
->hashcode
!= vr2
->hashcode
)
670 /* The VOP needs to be the same. */
671 if (vr1
->vuse
!= vr2
->vuse
)
674 /* If the operands are the same we are done. */
675 if (vr1
->operands
== vr2
->operands
)
678 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
681 if (INTEGRAL_TYPE_P (vr1
->type
)
682 && INTEGRAL_TYPE_P (vr2
->type
))
684 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
687 else if (INTEGRAL_TYPE_P (vr1
->type
)
688 && (TYPE_PRECISION (vr1
->type
)
689 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
691 else if (INTEGRAL_TYPE_P (vr2
->type
)
692 && (TYPE_PRECISION (vr2
->type
)
693 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
700 HOST_WIDE_INT off1
= 0, off2
= 0;
701 vn_reference_op_t vro1
, vro2
;
702 vn_reference_op_s tem1
, tem2
;
703 bool deref1
= false, deref2
= false;
704 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
706 if (vro1
->opcode
== MEM_REF
)
712 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
714 if (vro2
->opcode
== MEM_REF
)
722 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
724 memset (&tem1
, 0, sizeof (tem1
));
725 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
726 tem1
.type
= TREE_TYPE (tem1
.op0
);
727 tem1
.opcode
= TREE_CODE (tem1
.op0
);
731 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
733 memset (&tem2
, 0, sizeof (tem2
));
734 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
735 tem2
.type
= TREE_TYPE (tem2
.op0
);
736 tem2
.opcode
= TREE_CODE (tem2
.op0
);
740 if (deref1
!= deref2
)
742 if (!vn_reference_op_eq (vro1
, vro2
))
747 while (vr1
->operands
.length () != i
748 || vr2
->operands
.length () != j
);
753 /* Copy the operations present in load/store REF into RESULT, a vector of
754 vn_reference_op_s's. */
757 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
759 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
761 vn_reference_op_s temp
;
765 memset (&temp
, 0, sizeof (temp
));
766 temp
.type
= TREE_TYPE (ref
);
767 temp
.opcode
= TREE_CODE (ref
);
768 temp
.op0
= TMR_INDEX (ref
);
769 temp
.op1
= TMR_STEP (ref
);
770 temp
.op2
= TMR_OFFSET (ref
);
772 result
->quick_push (temp
);
774 memset (&temp
, 0, sizeof (temp
));
775 temp
.type
= NULL_TREE
;
776 temp
.opcode
= ERROR_MARK
;
777 temp
.op0
= TMR_INDEX2 (ref
);
779 result
->quick_push (temp
);
781 memset (&temp
, 0, sizeof (temp
));
782 temp
.type
= NULL_TREE
;
783 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
784 temp
.op0
= TMR_BASE (ref
);
786 result
->quick_push (temp
);
790 /* For non-calls, store the information that makes up the address. */
794 vn_reference_op_s temp
;
796 memset (&temp
, 0, sizeof (temp
));
797 temp
.type
= TREE_TYPE (ref
);
798 temp
.opcode
= TREE_CODE (ref
);
804 temp
.op0
= TREE_OPERAND (ref
, 1);
807 temp
.op0
= TREE_OPERAND (ref
, 1);
811 /* The base address gets its own vn_reference_op_s structure. */
812 temp
.op0
= TREE_OPERAND (ref
, 1);
813 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 1)))
814 temp
.off
= tree_to_shwi (TREE_OPERAND (ref
, 1));
817 /* Record bits and position. */
818 temp
.op0
= TREE_OPERAND (ref
, 1);
819 temp
.op1
= TREE_OPERAND (ref
, 2);
822 /* The field decl is enough to unambiguously specify the field,
823 a matching type is not necessary and a mismatching type
824 is always a spurious difference. */
825 temp
.type
= NULL_TREE
;
826 temp
.op0
= TREE_OPERAND (ref
, 1);
827 temp
.op1
= TREE_OPERAND (ref
, 2);
829 tree this_offset
= component_ref_field_offset (ref
);
831 && TREE_CODE (this_offset
) == INTEGER_CST
)
833 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
834 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
837 = (wi::to_offset (this_offset
)
838 + wi::lrshift (wi::to_offset (bit_offset
),
839 LOG2_BITS_PER_UNIT
));
840 if (wi::fits_shwi_p (off
)
841 /* Prohibit value-numbering zero offset components
842 of addresses the same before the pass folding
843 __builtin_object_size had a chance to run
844 (checking cfun->after_inlining does the trick here).  */
846 && (TREE_CODE (orig
) != ADDR_EXPR
848 || cfun
->after_inlining
))
849 temp
.off
= off
.to_shwi ();
854 case ARRAY_RANGE_REF
:
856 /* Record index as operand. */
857 temp
.op0
= TREE_OPERAND (ref
, 1);
858 /* Always record lower bounds and element size. */
859 temp
.op1
= array_ref_low_bound (ref
);
860 temp
.op2
= array_ref_element_size (ref
);
861 if (TREE_CODE (temp
.op0
) == INTEGER_CST
862 && TREE_CODE (temp
.op1
) == INTEGER_CST
863 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
865 offset_int off
= ((wi::to_offset (temp
.op0
)
866 - wi::to_offset (temp
.op1
))
867 * wi::to_offset (temp
.op2
));
868 if (wi::fits_shwi_p (off
))
869 temp
.off
= off
.to_shwi();
873 if (DECL_HARD_REGISTER (ref
))
882 /* Canonicalize decls to MEM[&decl] which is what we end up with
883 when valueizing MEM[ptr] with ptr = &decl. */
884 temp
.opcode
= MEM_REF
;
885 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
887 result
->safe_push (temp
);
888 temp
.opcode
= ADDR_EXPR
;
889 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
890 temp
.type
= TREE_TYPE (temp
.op0
);
904 if (is_gimple_min_invariant (ref
))
910 /* These are only interesting for their operands, their
911 existence, and their type. They will never be the last
912 ref in the chain of references (i.e. they require an
913 operand), so we don't have to put anything
914 for op* as it will be handled by the iteration */
916 case VIEW_CONVERT_EXPR
:
920 /* This is only interesting for its constant offset. */
921 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
926 result
->safe_push (temp
);
928 if (REFERENCE_CLASS_P (ref
)
929 || TREE_CODE (ref
) == MODIFY_EXPR
930 || TREE_CODE (ref
) == WITH_SIZE_EXPR
931 || (TREE_CODE (ref
) == ADDR_EXPR
932 && !is_gimple_min_invariant (ref
)))
933 ref
= TREE_OPERAND (ref
, 0);
939 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
940 operands in *OPS, the reference alias set SET and the reference type TYPE.
941 Return true if something useful was produced. */
944 ao_ref_init_from_vn_reference (ao_ref
*ref
,
945 alias_set_type set
, tree type
,
946 vec
<vn_reference_op_s
> ops
)
948 vn_reference_op_t op
;
950 tree base
= NULL_TREE
;
952 HOST_WIDE_INT offset
= 0;
953 HOST_WIDE_INT max_size
;
954 HOST_WIDE_INT size
= -1;
955 tree size_tree
= NULL_TREE
;
956 alias_set_type base_alias_set
= -1;
958 /* First get the final access size from just the outermost expression. */
960 if (op
->opcode
== COMPONENT_REF
)
961 size_tree
= DECL_SIZE (op
->op0
);
962 else if (op
->opcode
== BIT_FIELD_REF
)
966 enum machine_mode mode
= TYPE_MODE (type
);
968 size_tree
= TYPE_SIZE (type
);
970 size
= GET_MODE_BITSIZE (mode
);
972 if (size_tree
!= NULL_TREE
)
974 if (!tree_fits_uhwi_p (size_tree
))
977 size
= tree_to_uhwi (size_tree
);
980 /* Initially, maxsize is the same as the accessed element size.
981 In the following it will only grow (or become -1). */
984 /* Compute cumulative bit-offset for nested component-refs and array-refs,
985 and find the ultimate containing object. */
986 FOR_EACH_VEC_ELT (ops
, i
, op
)
990 /* These may be in the reference ops, but we cannot do anything
991 sensible with them here. */
993 /* Apart from ADDR_EXPR arguments to MEM_REF. */
994 if (base
!= NULL_TREE
995 && TREE_CODE (base
) == MEM_REF
997 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
999 vn_reference_op_t pop
= &ops
[i
-1];
1000 base
= TREE_OPERAND (op
->op0
, 0);
1007 offset
+= pop
->off
* BITS_PER_UNIT
;
1015 /* Record the base objects. */
1017 base_alias_set
= get_deref_alias_set (op
->op0
);
1018 *op0_p
= build2 (MEM_REF
, op
->type
,
1019 NULL_TREE
, op
->op0
);
1020 op0_p
= &TREE_OPERAND (*op0_p
, 0);
1031 /* And now the usual component-reference style ops. */
1033 offset
+= tree_to_shwi (op
->op1
);
1038 tree field
= op
->op0
;
1039 /* We do not have a complete COMPONENT_REF tree here so we
1040 cannot use component_ref_field_offset.  Do the interesting parts manually.  */
1044 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
)))
1048 offset
+= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
1050 offset
+= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1055 case ARRAY_RANGE_REF
:
1057 /* We recorded the lower bound and the element size. */
1058 if (!tree_fits_shwi_p (op
->op0
)
1059 || !tree_fits_shwi_p (op
->op1
)
1060 || !tree_fits_shwi_p (op
->op2
))
1064 HOST_WIDE_INT hindex
= tree_to_shwi (op
->op0
);
1065 hindex
-= tree_to_shwi (op
->op1
);
1066 hindex
*= tree_to_shwi (op
->op2
);
1067 hindex
*= BITS_PER_UNIT
;
1079 case VIEW_CONVERT_EXPR
:
1096 if (base
== NULL_TREE
)
1099 ref
->ref
= NULL_TREE
;
1101 ref
->offset
= offset
;
1103 ref
->max_size
= max_size
;
1104 ref
->ref_alias_set
= set
;
1105 if (base_alias_set
!= -1)
1106 ref
->base_alias_set
= base_alias_set
;
1108 ref
->base_alias_set
= get_alias_set (base
);
1109 /* We discount volatiles from value-numbering elsewhere. */
1110 ref
->volatile_p
= false;
1115 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1116 vn_reference_op_s's. */
1119 copy_reference_ops_from_call (gimple call
,
1120 vec
<vn_reference_op_s
> *result
)
1122 vn_reference_op_s temp
;
1124 tree lhs
= gimple_call_lhs (call
);
1127 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1128 different. By adding the lhs here in the vector, we ensure that the
1129 hashcode is different, guaranteeing a different value number. */
1130 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1132 memset (&temp
, 0, sizeof (temp
));
1133 temp
.opcode
= MODIFY_EXPR
;
1134 temp
.type
= TREE_TYPE (lhs
);
1137 result
->safe_push (temp
);
1140 /* Copy the type, opcode, function, static chain and EH region, if any. */
1141 memset (&temp
, 0, sizeof (temp
));
1142 temp
.type
= gimple_call_return_type (call
);
1143 temp
.opcode
= CALL_EXPR
;
1144 temp
.op0
= gimple_call_fn (call
);
1145 temp
.op1
= gimple_call_chain (call
);
1146 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1147 temp
.op2
= size_int (lr
);
1149 result
->safe_push (temp
);
1151 /* Copy the call arguments. As they can be references as well,
1152 just chain them together. */
1153 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1155 tree callarg
= gimple_call_arg (call
, i
);
1156 copy_reference_ops_from_ref (callarg
, result
);
1160 /* Create a vector of vn_reference_op_s structures from CALL, a
1161 call statement. The vector is not shared. */
1163 static vec
<vn_reference_op_s
>
1164 create_reference_ops_from_call (gimple call
)
1166 vec
<vn_reference_op_s
> result
= vNULL
;
1168 copy_reference_ops_from_call (call
, &result
);
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1175 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1178 unsigned int i
= *i_p
;
1179 vn_reference_op_t op
= &(*ops
)[i
];
1180 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1182 HOST_WIDE_INT addr_offset
= 0;
1184 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1185 from .foo.bar to the preceding MEM_REF offset and replace the
1186 address with &OBJ. */
1187 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1189 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1190 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1192 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1194 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1195 op
->op0
= build_fold_addr_expr (addr_base
);
1196 if (tree_fits_shwi_p (mem_op
->op0
))
1197 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1203 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1204 *I_P to point to the last element of the replacement. */
1206 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1209 unsigned int i
= *i_p
;
1210 vn_reference_op_t op
= &(*ops
)[i
];
1211 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1213 enum tree_code code
;
1216 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1217 if (!is_gimple_assign (def_stmt
))
1220 code
= gimple_assign_rhs_code (def_stmt
);
1221 if (code
!= ADDR_EXPR
1222 && code
!= POINTER_PLUS_EXPR
)
1225 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1227 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1228 from .foo.bar to the preceding MEM_REF offset and replace the
1229 address with &OBJ. */
1230 if (code
== ADDR_EXPR
)
1232 tree addr
, addr_base
;
1233 HOST_WIDE_INT addr_offset
;
1235 addr
= gimple_assign_rhs1 (def_stmt
);
1236 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1239 || TREE_CODE (addr_base
) != MEM_REF
)
1243 off
+= mem_ref_offset (addr_base
);
1244 op
->op0
= TREE_OPERAND (addr_base
, 0);
1249 ptr
= gimple_assign_rhs1 (def_stmt
);
1250 ptroff
= gimple_assign_rhs2 (def_stmt
);
1251 if (TREE_CODE (ptr
) != SSA_NAME
1252 || TREE_CODE (ptroff
) != INTEGER_CST
)
1255 off
+= wi::to_offset (ptroff
);
1259 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1260 if (tree_fits_shwi_p (mem_op
->op0
))
1261 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1264 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1265 op
->op0
= SSA_VAL (op
->op0
);
1266 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1267 op
->opcode
= TREE_CODE (op
->op0
);
1270 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1271 vn_reference_maybe_forwprop_address (ops
, i_p
);
1272 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1273 vn_reference_fold_indirect (ops
, i_p
);
1276 /* Optimize the reference REF to a constant if possible or return
1277 NULL_TREE if not. */
1280 fully_constant_vn_reference_p (vn_reference_t ref
)
1282 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1283 vn_reference_op_t op
;
1285 /* Try to simplify the translated expression if it is
1286 a call to a builtin function with at most two arguments. */
1288 if (op
->opcode
== CALL_EXPR
1289 && TREE_CODE (op
->op0
) == ADDR_EXPR
1290 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1291 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1292 && operands
.length () >= 2
1293 && operands
.length () <= 3)
1295 vn_reference_op_t arg0
, arg1
= NULL
;
1296 bool anyconst
= false;
1297 arg0
= &operands
[1];
1298 if (operands
.length () > 2)
1299 arg1
= &operands
[2];
1300 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1301 || (arg0
->opcode
== ADDR_EXPR
1302 && is_gimple_min_invariant (arg0
->op0
)))
1305 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1306 || (arg1
->opcode
== ADDR_EXPR
1307 && is_gimple_min_invariant (arg1
->op0
))))
1311 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1314 arg1
? arg1
->op0
: NULL
);
1316 && TREE_CODE (folded
) == NOP_EXPR
)
1317 folded
= TREE_OPERAND (folded
, 0);
1319 && is_gimple_min_invariant (folded
))
1324 /* Simplify reads from constant strings. */
1325 else if (op
->opcode
== ARRAY_REF
1326 && TREE_CODE (op
->op0
) == INTEGER_CST
1327 && integer_zerop (op
->op1
)
1328 && operands
.length () == 2)
1330 vn_reference_op_t arg0
;
1331 arg0
= &operands
[1];
1332 if (arg0
->opcode
== STRING_CST
1333 && (TYPE_MODE (op
->type
)
1334 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
->op0
))))
1335 && GET_MODE_CLASS (TYPE_MODE (op
->type
)) == MODE_INT
1336 && GET_MODE_SIZE (TYPE_MODE (op
->type
)) == 1
1337 && tree_int_cst_sgn (op
->op0
) >= 0
1338 && compare_tree_int (op
->op0
, TREE_STRING_LENGTH (arg0
->op0
)) < 0)
1339 return build_int_cst_type (op
->type
,
1340 (TREE_STRING_POINTER (arg0
->op0
)
1341 [TREE_INT_CST_LOW (op
->op0
)]));
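/* Illustrative example (not from the original sources): the constant
   string case above turns a read like "abc"[1] - an ARRAY_REF with a
   constant in-bounds index into a STRING_CST of single-byte elements -
   directly into the character constant 'b', with no memory walk.  */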
1347 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1348 structures into their value numbers. This is done in-place, and
1349 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1350 whether any operands were valueized. */
1352 static vec
<vn_reference_op_s
>
1353 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1355 vn_reference_op_t vro
;
1358 *valueized_anything
= false;
1360 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1362 if (vro
->opcode
== SSA_NAME
1363 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1365 tree tem
= SSA_VAL (vro
->op0
);
1366 if (tem
!= vro
->op0
)
1368 *valueized_anything
= true;
1371 /* If it transforms from an SSA_NAME to a constant, update
1373 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1374 vro
->opcode
= TREE_CODE (vro
->op0
);
1376 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1378 tree tem
= SSA_VAL (vro
->op1
);
1379 if (tem
!= vro
->op1
)
1381 *valueized_anything
= true;
1385 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1387 tree tem
= SSA_VAL (vro
->op2
);
1388 if (tem
!= vro
->op2
)
1390 *valueized_anything
= true;
1394 /* If it transforms from an SSA_NAME to an address, fold with
1395 a preceding indirect reference. */
1398 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1399 && orig
[i
- 1].opcode
== MEM_REF
)
1400 vn_reference_fold_indirect (&orig
, &i
);
1402 && vro
->opcode
== SSA_NAME
1403 && orig
[i
- 1].opcode
== MEM_REF
)
1404 vn_reference_maybe_forwprop_address (&orig
, &i
);
1405 /* If it transforms a non-constant ARRAY_REF into a constant
1406 one, adjust the constant offset. */
1407 else if (vro
->opcode
== ARRAY_REF
1409 && TREE_CODE (vro
->op0
) == INTEGER_CST
1410 && TREE_CODE (vro
->op1
) == INTEGER_CST
1411 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1413 offset_int off
= ((wi::to_offset (vro
->op0
)
1414 - wi::to_offset (vro
->op1
))
1415 * wi::to_offset (vro
->op2
));
1416 if (wi::fits_shwi_p (off
))
1417 vro
->off
= off
.to_shwi ();
1424 static vec
<vn_reference_op_s
>
1425 valueize_refs (vec
<vn_reference_op_s
> orig
)
1428 return valueize_refs_1 (orig
, &tem
);
1431 static vec
<vn_reference_op_s
> shared_lookup_references
;
1433 /* Create a vector of vn_reference_op_s structures from REF, a
1434 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1435 this function. *VALUEIZED_ANYTHING will specify whether any
1436 operands were valueized. */
1438 static vec
<vn_reference_op_s
>
1439 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1443 shared_lookup_references
.truncate (0);
1444 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1445 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1446 valueized_anything
);
1447 return shared_lookup_references
;
1450 /* Create a vector of vn_reference_op_s structures from CALL, a
1451 call statement. The vector is shared among all callers of
1454 static vec
<vn_reference_op_s
>
1455 valueize_shared_reference_ops_from_call (gimple call
)
1459 shared_lookup_references
.truncate (0);
1460 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1461 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1462 return shared_lookup_references
;
1465 /* Lookup a SCCVN reference operation VR in the current hash table.
1466 Returns the resulting value number if it exists in the hash table,
1467 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1468 vn_reference_t stored in the hashtable if something is found. */
1471 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1473 vn_reference_s
**slot
;
1476 hash
= vr
->hashcode
;
1477 slot
= current_info
->references
.find_slot_with_hash (vr
, hash
, NO_INSERT
);
1478 if (!slot
&& current_info
== optimistic_info
)
1479 slot
= valid_info
->references
.find_slot_with_hash (vr
, hash
, NO_INSERT
);
1483 *vnresult
= (vn_reference_t
)*slot
;
1484 return ((vn_reference_t
)*slot
)->result
;
1490 static tree
*last_vuse_ptr
;
1491 static vn_lookup_kind vn_walk_kind
;
1492 static vn_lookup_kind default_vn_walk_kind
;
1494 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1495 with the current VUSE and performs the expression lookup. */
1498 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1499 unsigned int cnt
, void *vr_
)
1501 vn_reference_t vr
= (vn_reference_t
)vr_
;
1502 vn_reference_s
**slot
;
1505 /* This bounds the stmt walks we perform on reference lookups
1506 to O(1) instead of O(N) where N is the number of dominating stores.  */
1508 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1512 *last_vuse_ptr
= vuse
;
1514 /* Fixup vuse and hash. */
1516 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1517 vr
->vuse
= vuse_ssa_val (vuse
);
1519 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1521 hash
= vr
->hashcode
;
1522 slot
= current_info
->references
.find_slot_with_hash (vr
, hash
, NO_INSERT
);
1523 if (!slot
&& current_info
== optimistic_info
)
1524 slot
= valid_info
->references
.find_slot_with_hash (vr
, hash
, NO_INSERT
);
1531 /* Lookup an existing or insert a new vn_reference entry into the
1532 value table for the VUSE, SET, TYPE, OPERANDS reference which
1533 has the value VALUE which is either a constant or an SSA name. */
1535 static vn_reference_t
1536 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1539 vec
<vn_reference_op_s
,
1543 struct vn_reference_s vr1
;
1544 vn_reference_t result
;
1547 vr1
.operands
= operands
;
1550 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1551 if (vn_reference_lookup_1 (&vr1
, &result
))
1553 if (TREE_CODE (value
) == SSA_NAME
)
1554 value_id
= VN_INFO (value
)->value_id
;
1556 value_id
= get_or_alloc_constant_value_id (value
);
1557 return vn_reference_insert_pieces (vuse
, set
, type
,
1558 operands
.copy (), value
, value_id
);
1561 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1562 from the statement defining VUSE and if not successful tries to
1563 translate *REFP and VR_ through an aggregate copy at the definition
1567 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1568 bool disambiguate_only
)
1570 vn_reference_t vr
= (vn_reference_t
)vr_
;
1571 gimple def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1573 HOST_WIDE_INT offset
, maxsize
;
1574 static vec
<vn_reference_op_s
>
1577 bool lhs_ref_ok
= false;
1579 /* First try to disambiguate after value-replacing in the definitions LHS. */
1580 if (is_gimple_assign (def_stmt
))
1582 vec
<vn_reference_op_s
> tem
;
1583 tree lhs
= gimple_assign_lhs (def_stmt
);
1584 bool valueized_anything
= false;
1585 /* Avoid re-allocation overhead. */
1586 lhs_ops
.truncate (0);
1587 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1589 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1590 gcc_assert (lhs_ops
== tem
);
1591 if (valueized_anything
)
1593 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1594 get_alias_set (lhs
),
1595 TREE_TYPE (lhs
), lhs_ops
);
1597 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1602 ao_ref_init (&lhs_ref
, lhs
);
1606 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1607 && gimple_call_num_args (def_stmt
) <= 4)
1609 /* For builtin calls valueize its arguments and call the
1610 alias oracle again. Valueization may improve points-to
1611 info of pointers and constify size and position arguments.
1612 Originally this was motivated by PR61034 which has
1613 conditional calls to free falsely clobbering ref because
1614 of imprecise points-to info of the argument. */
1616 bool valueized_anything
= false;
1617 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1619 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1620 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1621 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1623 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1624 valueized_anything
= true;
1627 if (valueized_anything
)
1629 bool res
= call_may_clobber_ref_p_1 (def_stmt
, ref
);
1630 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1631 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1637 if (disambiguate_only
)
1640 base
= ao_ref_base (ref
);
1641 offset
= ref
->offset
;
1642 maxsize
= ref
->max_size
;
1644 /* If we cannot constrain the size of the reference we cannot
1645 test if anything kills it. */
1649 /* We can't deduce anything useful from clobbers. */
1650 if (gimple_clobber_p (def_stmt
))
1653 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1654 from that definition.
1656 if (is_gimple_reg_type (vr
->type
)
1657 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1658 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1659 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1660 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1662 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1664 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1665 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
);
1666 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1667 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1668 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1670 && operand_equal_p (base
, base2
, 0)
1671 && offset2
<= offset
1672 && offset2
+ size2
>= offset
+ maxsize
)
1674 tree val
= build_zero_cst (vr
->type
);
1675 return vn_reference_lookup_or_insert_for_pieces
1676 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1680 /* 2) Assignment from an empty CONSTRUCTOR. */
1681 else if (is_gimple_reg_type (vr
->type
)
1682 && gimple_assign_single_p (def_stmt
)
1683 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1684 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1687 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1688 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1689 &offset2
, &size2
, &maxsize2
);
1691 && operand_equal_p (base
, base2
, 0)
1692 && offset2
<= offset
1693 && offset2
+ size2
>= offset
+ maxsize
)
1695 tree val
= build_zero_cst (vr
->type
);
1696 return vn_reference_lookup_or_insert_for_pieces
1697 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1701 /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1702 routines to extract the assigned bits. */
1703 else if (vn_walk_kind
== VN_WALKREWRITE
1704 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1705 && ref
->size
== maxsize
1706 && maxsize
% BITS_PER_UNIT
== 0
1707 && offset
% BITS_PER_UNIT
== 0
1708 && is_gimple_reg_type (vr
->type
)
1709 && gimple_assign_single_p (def_stmt
)
1710 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
1713 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1714 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1715 &offset2
, &size2
, &maxsize2
);
1717 && maxsize2
== size2
1718 && size2
% BITS_PER_UNIT
== 0
1719 && offset2
% BITS_PER_UNIT
== 0
1720 && operand_equal_p (base
, base2
, 0)
1721 && offset2
<= offset
1722 && offset2
+ size2
>= offset
+ maxsize
)
1724 /* We support up to 512-bit values (for V8DFmode). */
1725 unsigned char buffer
[64];
1728 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1729 buffer
, sizeof (buffer
));
1732 tree val
= native_interpret_expr (vr
->type
,
1734 + ((offset
- offset2
)
1736 ref
->size
/ BITS_PER_UNIT
);
1738 return vn_reference_lookup_or_insert_for_pieces
1739 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1744 /* 4) Assignment from an SSA name whose definition we may be able
1745 to access pieces from. */
1746 else if (ref
->size
== maxsize
1747 && is_gimple_reg_type (vr
->type
)
1748 && gimple_assign_single_p (def_stmt
)
1749 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1751 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
1752 gimple def_stmt2
= SSA_NAME_DEF_STMT (rhs1
);
1753 if (is_gimple_assign (def_stmt2
)
1754 && (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
1755 || gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
)
1756 && types_compatible_p (vr
->type
, TREE_TYPE (TREE_TYPE (rhs1
))))
1759 HOST_WIDE_INT offset2
, size2
, maxsize2
, off
;
1760 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1761 &offset2
, &size2
, &maxsize2
);
1762 off
= offset
- offset2
;
1764 && maxsize2
== size2
1765 && operand_equal_p (base
, base2
, 0)
1766 && offset2
<= offset
1767 && offset2
+ size2
>= offset
+ maxsize
)
1769 tree val
= NULL_TREE
;
1771 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1
))));
1772 if (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
)
1775 val
= gimple_assign_rhs1 (def_stmt2
);
1776 else if (off
== elsz
)
1777 val
= gimple_assign_rhs2 (def_stmt2
);
1779 else if (gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
1782 tree ctor
= gimple_assign_rhs1 (def_stmt2
);
1783 unsigned i
= off
/ elsz
;
1784 if (i
< CONSTRUCTOR_NELTS (ctor
))
1786 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, i
);
1787 if (TREE_CODE (TREE_TYPE (rhs1
)) == VECTOR_TYPE
)
1789 if (TREE_CODE (TREE_TYPE (elt
->value
))
1796 return vn_reference_lookup_or_insert_for_pieces
1797 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1802 /* 5) For aggregate copies translate the reference through them if
1803 the copy kills ref. */
1804 else if (vn_walk_kind
== VN_WALKREWRITE
1805 && gimple_assign_single_p (def_stmt
)
1806 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1807 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1808 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1811 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1813 auto_vec
<vn_reference_op_s
> rhs
;
1814 vn_reference_op_t vro
;
1820 /* See if the assignment kills REF. */
1821 base2
= ao_ref_base (&lhs_ref
);
1822 offset2
= lhs_ref
.offset
;
1823 size2
= lhs_ref
.size
;
1824 maxsize2
= lhs_ref
.max_size
;
1826 || (base
!= base2
&& !operand_equal_p (base
, base2
, 0))
1828 || offset2
+ size2
< offset
+ maxsize
)
1831 /* Find the common base of ref and the lhs. lhs_ops already
1832 contains valueized operands for the lhs. */
1833 i
= vr
->operands
.length () - 1;
1834 j
= lhs_ops
.length () - 1;
1835 while (j
>= 0 && i
>= 0
1836 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
1842 /* ??? The innermost op should always be a MEM_REF and we already
1843 checked that the assignment to the lhs kills vr. Thus for
1844 aggregate copies using char[] types the vn_reference_op_eq
1845 may fail when comparing types for compatibility. But we really
1846 don't care here - further lookups with the rewritten operands
1847 will simply fail if we messed up types too badly. */
1848 if (j
== 0 && i
>= 0
1849 && lhs_ops
[0].opcode
== MEM_REF
1850 && lhs_ops
[0].off
!= -1
1851 && (lhs_ops
[0].off
== vr
->operands
[i
].off
))
1854 /* i now points to the first additional op.
1855 ??? LHS may not be completely contained in VR, one or more
1856 VIEW_CONVERT_EXPRs could be in its way. We could at least
1857 try handling outermost VIEW_CONVERT_EXPRs. */
1861 /* Now re-write REF to be based on the rhs of the assignment. */
1862 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1863 /* We need to pre-pend vr->operands[0..i] to rhs. */
1864 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
1866 vec
<vn_reference_op_s
> old
= vr
->operands
;
1867 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
1868 if (old
== shared_lookup_references
1869 && vr
->operands
!= old
)
1870 shared_lookup_references
= vNULL
;
1873 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
1874 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
1875 vr
->operands
[i
+ 1 + j
] = *vro
;
1876 vr
->operands
= valueize_refs (vr
->operands
);
1877 vr
->hashcode
= vn_reference_compute_hash (vr
);
1879 /* Adjust *ref from the new operands. */
1880 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
1882 /* This can happen with bitfields. */
1883 if (ref
->size
!= r
.size
)
1887 /* Do not update last seen VUSE after translating. */
1888 last_vuse_ptr
= NULL
;
1890 /* Keep looking for the adjusted *REF / VR pair. */
1894 /* 6) For memcpy copies translate the reference through them if
1895 the copy kills ref. */
1896 else if (vn_walk_kind
== VN_WALKREWRITE
1897 && is_gimple_reg_type (vr
->type
)
1898 /* ??? Handle BCOPY as well. */
1899 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
1900 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
1901 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
1902 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
1903 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
1904 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
1905 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
1906 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
1910 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
1911 vn_reference_op_s op
;
1915 /* Only handle non-variable, addressable refs. */
1916 if (ref
->size
!= maxsize
1917 || offset
% BITS_PER_UNIT
!= 0
1918 || ref
->size
% BITS_PER_UNIT
!= 0)
1921 /* Extract a pointer base and an offset for the destination. */
1922 lhs
= gimple_call_arg (def_stmt
, 0);
1924 if (TREE_CODE (lhs
) == SSA_NAME
)
1925 lhs
= SSA_VAL (lhs
);
1926 if (TREE_CODE (lhs
) == ADDR_EXPR
)
1928 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
1932 if (TREE_CODE (tem
) == MEM_REF
1933 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
1935 lhs
= TREE_OPERAND (tem
, 0);
1936 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
1938 else if (DECL_P (tem
))
1939 lhs
= build_fold_addr_expr (tem
);
1943 if (TREE_CODE (lhs
) != SSA_NAME
1944 && TREE_CODE (lhs
) != ADDR_EXPR
)
1947 /* Extract a pointer base and an offset for the source. */
1948 rhs
= gimple_call_arg (def_stmt
, 1);
1950 if (TREE_CODE (rhs
) == SSA_NAME
)
1951 rhs
= SSA_VAL (rhs
);
1952 if (TREE_CODE (rhs
) == ADDR_EXPR
)
1954 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
1958 if (TREE_CODE (tem
) == MEM_REF
1959 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
1961 rhs
= TREE_OPERAND (tem
, 0);
1962 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
1964 else if (DECL_P (tem
))
1965 rhs
= build_fold_addr_expr (tem
);
1969 if (TREE_CODE (rhs
) != SSA_NAME
1970 && TREE_CODE (rhs
) != ADDR_EXPR
)
1973 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
1975 /* The bases of the destination and the references have to agree. */
1976 if ((TREE_CODE (base
) != MEM_REF
1978 || (TREE_CODE (base
) == MEM_REF
1979 && (TREE_OPERAND (base
, 0) != lhs
1980 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
1982 && (TREE_CODE (lhs
) != ADDR_EXPR
1983 || TREE_OPERAND (lhs
, 0) != base
)))
1986 /* And the access has to be contained within the memcpy destination. */
1987 at
= offset
/ BITS_PER_UNIT
;
1988 if (TREE_CODE (base
) == MEM_REF
)
1989 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
1991 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
1994 /* Make room for 2 operands in the new reference. */
1995 if (vr
->operands
.length () < 2)
1997 vec
<vn_reference_op_s
> old
= vr
->operands
;
1998 vr
->operands
.safe_grow_cleared (2);
1999 if (old
== shared_lookup_references
2000 && vr
->operands
!= old
)
2001 shared_lookup_references
.create (0);
2004 vr
->operands
.truncate (2);
2006 /* The looked-through reference is a simple MEM_REF. */
2007 memset (&op
, 0, sizeof (op
));
2009 op
.opcode
= MEM_REF
;
2010 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2011 op
.off
= at
- lhs_offset
+ rhs_offset
;
2012 vr
->operands
[0] = op
;
2013 op
.type
= TREE_TYPE (rhs
);
2014 op
.opcode
= TREE_CODE (rhs
);
2017 vr
->operands
[1] = op
;
2018 vr
->hashcode
= vn_reference_compute_hash (vr
);
2020 /* Adjust *ref from the new operands. */
2021 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2023 /* This can happen with bitfields. */
2024 if (ref
->size
!= r
.size
)
2028 /* Do not update last seen VUSE after translating. */
2029 last_vuse_ptr
= NULL
;
2031 /* Keep looking for the adjusted *REF / VR pair. */
2035 /* Bail out and stop walking. */
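/* Illustrative example (not from the original sources) of case 5)
   above: given

       b = a;        <-- aggregate copy whose lhs covers the lookup
       x_1 = b.f;

   the lookup of b.f through the copy's VDEF is rewritten into a lookup
   of a.f by replacing the operands that matched the copy's lhs with
   the (valueized) operands of its rhs, and the vuse walk then continues
   with that adjusted reference.  */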
2039 /* Lookup a reference operation by its parts, in the current hash table.
2040 Returns the resulting value number if it exists in the hash table,
2041 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2042 vn_reference_t stored in the hashtable if something is found. */
2045 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2046 vec
<vn_reference_op_s
> operands
,
2047 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2049 struct vn_reference_s vr1
;
2057 vr1
.vuse
= vuse_ssa_val (vuse
);
2058 shared_lookup_references
.truncate (0);
2059 shared_lookup_references
.safe_grow (operands
.length ());
2060 memcpy (shared_lookup_references
.address (),
2061 operands
.address (),
2062 sizeof (vn_reference_op_s
)
2063 * operands
.length ());
2064 vr1
.operands
= operands
= shared_lookup_references
2065 = valueize_refs (shared_lookup_references
);
2068 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2069 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2072 vn_reference_lookup_1 (&vr1
, vnresult
);
2074 && kind
!= VN_NOWALK
2078 vn_walk_kind
= kind
;
2079 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2081 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2082 vn_reference_lookup_2
,
2083 vn_reference_lookup_3
, &vr1
);
2084 if (vr1
.operands
!= operands
)
2085 vr1
.operands
.release ();
2089 return (*vnresult
)->result
;
2094 /* Lookup OP in the current hash table, and return the resulting value
2095 number if it exists in the hash table. Return NULL_TREE if it does
2096 not exist in the hash table or if the result field of the structure
2097 was NULL.  VNRESULT will be filled in with the vn_reference_t
2098 stored in the hashtable if one exists. */
2101 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2102 vn_reference_t
*vnresult
)
2104 vec
<vn_reference_op_s
> operands
;
2105 struct vn_reference_s vr1
;
2107 bool valuezied_anything
;
2112 vr1
.vuse
= vuse_ssa_val (vuse
);
2113 vr1
.operands
= operands
2114 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2115 vr1
.type
= TREE_TYPE (op
);
2116 vr1
.set
= get_alias_set (op
);
2117 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2118 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2121 if (kind
!= VN_NOWALK
2124 vn_reference_t wvnresult
;
2126 /* Make sure to use a valueized reference if we valueized anything.
2127 Otherwise preserve the full reference for advanced TBAA. */
2128 if (!valuezied_anything
2129 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2131 ao_ref_init (&r
, op
);
2132 vn_walk_kind
= kind
;
2134 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2135 vn_reference_lookup_2
,
2136 vn_reference_lookup_3
, &vr1
);
2137 if (vr1
.operands
!= operands
)
2138 vr1
.operands
.release ();
2142 *vnresult
= wvnresult
;
2143 return wvnresult
->result
;
2149 return vn_reference_lookup_1 (&vr1
, vnresult
);
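/* Illustrative example (not from the original sources): for

       x_1 = *p_2;
       ...
       y_3 = *p_2;    <-- same VUSE, no intervening store

   the second load computes the same reference hash as the first, so
   vn_reference_lookup returns x_1's value number and the caller can
   reuse it instead of re-loading from memory.  */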
2153 /* Insert OP into the current hash table with a value number of
2154 RESULT, and return the resulting reference structure we created. */
2157 vn_reference_insert (tree op
, tree result
, tree vuse
, tree vdef
)
2159 vn_reference_s
**slot
;
2163 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2164 if (TREE_CODE (result
) == SSA_NAME
)
2165 vr1
->value_id
= VN_INFO (result
)->value_id
;
2167 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
2168 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2169 vr1
->operands
= valueize_shared_reference_ops_from_ref (op
, &tem
).copy ();
2170 vr1
->type
= TREE_TYPE (op
);
2171 vr1
->set
= get_alias_set (op
);
2172 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2173 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
2174 vr1
->result_vdef
= vdef
;
2176 slot
= current_info
->references
.find_slot_with_hash (vr1
, vr1
->hashcode
,
2179 /* Because we lookup stores using vuses, and value number failures
2180 using the vdefs (see visit_reference_op_store for how and why),
2181 it's possible that on failure we may try to insert an already
2182 inserted store. This is not wrong, there is no ssa name for a
2183 store that we could use as a differentiator anyway. Thus, unlike
2184 the other lookup functions, you cannot gcc_assert (!*slot)
2187 /* But free the old slot in case of a collision. */
2189 free_reference (*slot
);
2195 /* Insert a reference by its pieces into the current hash table with
2196 a value number of RESULT. Return the resulting reference
2197 structure we created. */
2200 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
2201 vec
<vn_reference_op_s
> operands
,
2202 tree result
, unsigned int value_id
)
2205 vn_reference_s
**slot
;
2208 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2209 vr1
->value_id
= value_id
;
2210 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2211 vr1
->operands
= valueize_refs (operands
);
2214 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2215 if (result
&& TREE_CODE (result
) == SSA_NAME
)
2216 result
= SSA_VAL (result
);
2217 vr1
->result
= result
;
2219 slot
= current_info
->references
.find_slot_with_hash (vr1
, vr1
->hashcode
,
2222 /* At this point we should have all the things inserted that we have
2223 seen before, and we should never try inserting something that already exists.  */
2225 gcc_assert (!*slot
);
2227 free_reference (*slot
);
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
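
/* Illustrative note (not from the original sources): because the hash
   function above first replaces SSA operands by their current value
   numbers and then canonicalizes commutative operands, e.g.

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   both right-hand sides hash identically once a_2 and b_3 are valueized,
   so the second addition is found in the nary table and y_4 gets the
   value number of x_1.  */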
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
2312 /* Return the number of operands for a vn_nary ops structure from STMT. */
2315 vn_nary_length_from_stmt (gimple stmt
)
2317 switch (gimple_assign_rhs_code (stmt
))
2321 case VIEW_CONVERT_EXPR
:
2328 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2331 return gimple_num_ops (stmt
) - 1;
2335 /* Initialize VNO from STMT. */
2338 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple stmt
)
2342 vno
->opcode
= gimple_assign_rhs_code (stmt
);
2343 vno
->type
= gimple_expr_type (stmt
);
2344 switch (vno
->opcode
)
2348 case VIEW_CONVERT_EXPR
:
2350 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2355 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2356 vno
->op
[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 1);
2357 vno
->op
[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 2);
2361 vno
->length
= CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2362 for (i
= 0; i
< vno
->length
; ++i
)
2363 vno
->op
[i
] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt
), i
)->value
;
2367 gcc_checking_assert (!gimple_assign_single_p (stmt
));
2368 vno
->length
= gimple_num_ops (stmt
) - 1;
2369 for (i
= 0; i
< vno
->length
; ++i
)
2370 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
2374 /* Compute the hashcode for VNO and look for it in the hash table;
2375 return the resulting value number if it exists in the hash table.
2376 Return NULL_TREE if it does not exist in the hash table or if the
2377 result field of the operation is NULL. VNRESULT will contain the
2378 vn_nary_op_t from the hashtable if it exists. */
2381 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
2383 vn_nary_op_s
**slot
;
2388 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2389 slot
= current_info
->nary
.find_slot_with_hash (vno
, vno
->hashcode
, NO_INSERT
);
2390 if (!slot
&& current_info
== optimistic_info
)
2391 slot
= valid_info
->nary
.find_slot_with_hash (vno
, vno
->hashcode
, NO_INSERT
);
2396 return (*slot
)->result
;
2399 /* Lookup a n-ary operation by its pieces and return the resulting value
2400 number if it exists in the hash table. Return NULL_TREE if it does
2401 not exist in the hash table or if the result field of the operation
2402 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2406 vn_nary_op_lookup_pieces (unsigned int length
, enum tree_code code
,
2407 tree type
, tree
*ops
, vn_nary_op_t
*vnresult
)
2409 vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s
,
2410 sizeof_vn_nary_op (length
));
2411 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2412 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2415 /* Lookup OP in the current hash table, and return the resulting value
2416 number if it exists in the hash table. Return NULL_TREE if it does
2417 not exist in the hash table or if the result field of the operation
2418 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2422 vn_nary_op_lookup (tree op
, vn_nary_op_t
*vnresult
)
2425 = XALLOCAVAR (struct vn_nary_op_s
,
2426 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op
))));
2427 init_vn_nary_op_from_op (vno1
, op
);
2428 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2431 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2432 value number if it exists in the hash table. Return NULL_TREE if
2433 it does not exist in the hash table. VNRESULT will contain the
2434 vn_nary_op_t from the hashtable if it exists. */
2437 vn_nary_op_lookup_stmt (gimple stmt
, vn_nary_op_t
*vnresult
)
2440 = XALLOCAVAR (struct vn_nary_op_s
,
2441 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt
)));
2442 init_vn_nary_op_from_stmt (vno1
, stmt
);
2443 return vn_nary_op_lookup_1 (vno1
, vnresult
);
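
/* Illustrative sketch (not part of the original sources): how a client
   such as PRE might query and populate the nary table by pieces, with
   hypothetical operands OP0/OP1, a type TYPE, a result RESULT and a
   value id VALUE_ID:

     tree ops[2] = { op0, op1 };
     vn_nary_op_t nary = NULL;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, &nary);
     if (!val)
       vn_nary_op_insert_pieces (2, PLUS_EXPR, type, ops,
                                 result, value_id);
*/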
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
                        bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  result += vn_hash_type (type);

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
        {
          tree phi2op = vp2->phiargs[i];
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
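
/* Illustrative note (not from the original sources): the equality above
   lets two PHIs in the same block share one value number when their
   valueized arguments agree edge-wise, e.g.

     x_1 = PHI <a_2(3), b_4(4)>
     y_5 = PHI <a_2(3), c_6(4)>

   become equivalent once c_6 has been value numbered to b_4; VN_TOP
   arguments are ignored because VN_TOP is compatible with anything.  */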
static vec<tree> shared_lookup_phiargs;
2593 /* Lookup PHI in the current hash table, and return the resulting
2594 value number if it exists in the hash table. Return NULL_TREE if
2595 it does not exist in the hash table. */
2598 vn_phi_lookup (gimple phi
)
2601 struct vn_phi_s vp1
;
2604 shared_lookup_phiargs
.truncate (0);
2606 /* Canonicalize the SSA_NAME's to their value number. */
2607 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2609 tree def
= PHI_ARG_DEF (phi
, i
);
2610 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2611 shared_lookup_phiargs
.safe_push (def
);
2613 vp1
.type
= TREE_TYPE (gimple_phi_result (phi
));
2614 vp1
.phiargs
= shared_lookup_phiargs
;
2615 vp1
.block
= gimple_bb (phi
);
2616 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2617 slot
= current_info
->phis
.find_slot_with_hash (&vp1
, vp1
.hashcode
, NO_INSERT
);
2618 if (!slot
&& current_info
== optimistic_info
)
2619 slot
= valid_info
->phis
.find_slot_with_hash (&vp1
, vp1
.hashcode
, NO_INSERT
);
2622 return (*slot
)->result
;
2625 /* Insert PHI into the current hash table with a value number of
2629 vn_phi_insert (gimple phi
, tree result
)
2632 vn_phi_t vp1
= (vn_phi_t
) pool_alloc (current_info
->phis_pool
);
2634 vec
<tree
> args
= vNULL
;
2636 /* Canonicalize the SSA_NAME's to their value number. */
2637 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2639 tree def
= PHI_ARG_DEF (phi
, i
);
2640 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2641 args
.safe_push (def
);
2643 vp1
->value_id
= VN_INFO (result
)->value_id
;
2644 vp1
->type
= TREE_TYPE (gimple_phi_result (phi
));
2645 vp1
->phiargs
= args
;
2646 vp1
->block
= gimple_bb (phi
);
2647 vp1
->result
= result
;
2648 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
2650 slot
= current_info
->phis
.find_slot_with_hash (vp1
, vp1
->hashcode
, INSERT
);
  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}
2676 /* Set the value number of FROM to TO, return true if it has changed
2680 set_ssa_val_to (tree from
, tree to
)
2682 tree currval
= SSA_VAL (from
);
2683 HOST_WIDE_INT toff
, coff
;
  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
2694 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2695 fprintf (dump_file
, "Forcing value number to varying on "
2696 "receiving VN_TOP\n");
2700 gcc_assert (to
!= NULL_TREE
2701 && (TREE_CODE (to
) == SSA_NAME
2702 || is_gimple_min_invariant (to
)));
2706 if (currval
== from
)
2708 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2710 fprintf (dump_file
, "Not changing value number of ");
2711 print_generic_expr (dump_file
, from
, 0);
2712 fprintf (dump_file
, " from VARYING to ");
2713 print_generic_expr (dump_file
, to
, 0);
2714 fprintf (dump_file
, "\n");
2718 else if (TREE_CODE (to
) == SSA_NAME
2719 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
2723 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2725 fprintf (dump_file
, "Setting value number of ");
2726 print_generic_expr (dump_file
, from
, 0);
2727 fprintf (dump_file
, " to ");
2728 print_generic_expr (dump_file
, to
, 0);
2732 && !operand_equal_p (currval
, to
, 0)
2733 /* ??? For addresses involving volatile objects or types operand_equal_p
2734 does not reliably detect ADDR_EXPRs as equal. We know we are only
2735 getting invariant gimple addresses here, so can use
2736 get_addr_base_and_unit_offset to do this comparison. */
2737 && !(TREE_CODE (currval
) == ADDR_EXPR
2738 && TREE_CODE (to
) == ADDR_EXPR
2739 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval
, 0), &coff
)
2740 == get_addr_base_and_unit_offset (TREE_OPERAND (to
, 0), &toff
))
2743 VN_INFO (from
)->valnum
= to
;
2744 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2745 fprintf (dump_file
, " (changed)\n");
2748 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2749 fprintf (dump_file
, "\n");
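
/* Illustrative note (not from the original sources): value numbers only
   move down the lattice VN_TOP -> (constant or SSA name) -> varying,
   where "varying" is represented by a name being its own value.  For
   example, during optimistic iteration a name may go

     SSA_VAL (i_1) : VN_TOP  ->  0  ->  i_1 (varying)

   and set_ssa_val_to reports a change at each step, so the SCC iteration
   keeps going until the values stabilize.  */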
2753 /* Mark as processed all the definitions in the defining stmt of USE, or
2757 mark_use_processed (tree use
)
2761 gimple stmt
= SSA_NAME_DEF_STMT (use
);
2763 if (SSA_NAME_IS_DEFAULT_DEF (use
) || gimple_code (stmt
) == GIMPLE_PHI
)
2765 VN_INFO (use
)->use_processed
= true;
2769 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2771 tree def
= DEF_FROM_PTR (defp
);
2773 VN_INFO (def
)->use_processed
= true;
2777 /* Set all definitions in STMT to value number to themselves.
2778 Return true if a value number changed. */
2781 defs_to_varying (gimple stmt
)
2783 bool changed
= false;
2787 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2789 tree def
= DEF_FROM_PTR (defp
);
2790 changed
|= set_ssa_val_to (def
, def
);
2795 static bool expr_has_constants (tree expr
);
2797 /* Visit a copy between LHS and RHS, return true if the value number
2801 visit_copy (tree lhs
, tree rhs
)
2803 /* The copy may have a more interesting constant filled expression
2804 (we don't, since we know our RHS is just an SSA name). */
2805 VN_INFO (lhs
)->has_constants
= VN_INFO (rhs
)->has_constants
;
2806 VN_INFO (lhs
)->expr
= VN_INFO (rhs
)->expr
;
2808 /* And finally valueize. */
2809 rhs
= SSA_VAL (rhs
);
2811 return set_ssa_val_to (lhs
, rhs
);
2814 /* Visit a nary operator RHS, value number it, and return true if the
2815 value number of LHS has changed as a result. */
2818 visit_nary_op (tree lhs
, gimple stmt
)
2820 bool changed
= false;
2821 tree result
= vn_nary_op_lookup_stmt (stmt
, NULL
);
2824 changed
= set_ssa_val_to (lhs
, result
);
2827 changed
= set_ssa_val_to (lhs
, lhs
);
2828 vn_nary_op_insert_stmt (stmt
, lhs
);
2834 /* Visit a call STMT storing into LHS. Return true if the value number
2835 of the LHS has changed as a result. */
2838 visit_reference_op_call (tree lhs
, gimple stmt
)
2840 bool changed
= false;
2841 struct vn_reference_s vr1
;
2842 vn_reference_t vnresult
= NULL
;
2843 tree vuse
= gimple_vuse (stmt
);
2844 tree vdef
= gimple_vdef (stmt
);
2846 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2847 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
2850 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2851 vr1
.operands
= valueize_shared_reference_ops_from_call (stmt
);
2852 vr1
.type
= gimple_expr_type (stmt
);
2854 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2855 vn_reference_lookup_1 (&vr1
, &vnresult
);
2859 if (vnresult
->result_vdef
&& vdef
)
2860 changed
|= set_ssa_val_to (vdef
, vnresult
->result_vdef
);
2862 if (!vnresult
->result
&& lhs
)
2863 vnresult
->result
= lhs
;
2865 if (vnresult
->result
&& lhs
)
2867 changed
|= set_ssa_val_to (lhs
, vnresult
->result
);
2869 if (VN_INFO (vnresult
->result
)->has_constants
)
2870 VN_INFO (lhs
)->has_constants
= true;
2875 vn_reference_s
**slot
;
2878 changed
|= set_ssa_val_to (vdef
, vdef
);
2880 changed
|= set_ssa_val_to (lhs
, lhs
);
2881 vr2
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2882 vr2
->vuse
= vr1
.vuse
;
2883 vr2
->operands
= valueize_refs (create_reference_ops_from_call (stmt
));
2884 vr2
->type
= vr1
.type
;
2886 vr2
->hashcode
= vr1
.hashcode
;
2888 vr2
->result_vdef
= vdef
;
2889 slot
= current_info
->references
.find_slot_with_hash (vr2
, vr2
->hashcode
,
2892 free_reference (*slot
);
2899 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2900 and return true if the value number of the LHS has changed as a result. */
2903 visit_reference_op_load (tree lhs
, tree op
, gimple stmt
)
2905 bool changed
= false;
2909 last_vuse
= gimple_vuse (stmt
);
2910 last_vuse_ptr
= &last_vuse
;
2911 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
2912 default_vn_walk_kind
, NULL
);
2913 last_vuse_ptr
= NULL
;
2915 /* If we have a VCE, try looking up its operand as it might be stored in
2916 a different type. */
2917 if (!result
&& TREE_CODE (op
) == VIEW_CONVERT_EXPR
)
2918 result
= vn_reference_lookup (TREE_OPERAND (op
, 0), gimple_vuse (stmt
),
2919 default_vn_walk_kind
, NULL
);
2921 /* We handle type-punning through unions by value-numbering based
2922 on offset and size of the access. Be prepared to handle a
2923 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2925 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
2927 /* We will be setting the value number of lhs to the value number
2928 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2929 So first simplify and lookup this expression to see if it
2930 is already available. */
2931 tree val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (op
), result
);
2932 if ((CONVERT_EXPR_P (val
)
2933 || TREE_CODE (val
) == VIEW_CONVERT_EXPR
)
2934 && TREE_CODE (TREE_OPERAND (val
, 0)) == SSA_NAME
)
2936 tree tem
= vn_get_expr_for (TREE_OPERAND (val
, 0));
2937 if ((CONVERT_EXPR_P (tem
)
2938 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
)
2939 && (tem
= fold_unary_ignore_overflow (TREE_CODE (val
),
2940 TREE_TYPE (val
), tem
)))
2944 if (!is_gimple_min_invariant (val
)
2945 && TREE_CODE (val
) != SSA_NAME
)
2946 result
= vn_nary_op_lookup (val
, NULL
);
2947 /* If the expression is not yet available, value-number lhs to
2948 a new SSA_NAME we create. */
2951 result
= make_temp_ssa_name (TREE_TYPE (lhs
), gimple_build_nop (),
2953 /* Initialize value-number information properly. */
2954 VN_INFO_GET (result
)->valnum
= result
;
2955 VN_INFO (result
)->value_id
= get_next_value_id ();
2956 VN_INFO (result
)->expr
= val
;
2957 VN_INFO (result
)->has_constants
= expr_has_constants (val
);
2958 VN_INFO (result
)->needs_insertion
= true;
2959 /* As all "inserted" statements are singleton SCCs, insert
2960 to the valid table. This is strictly needed to
2961 avoid re-generating new value SSA_NAMEs for the same
2962 expression during SCC iteration over and over (the
2963 optimistic table gets cleared after each iteration).
2964 We do not need to insert into the optimistic table, as
2965 lookups there will fall back to the valid table. */
2966 if (current_info
== optimistic_info
)
2968 current_info
= valid_info
;
2969 vn_nary_op_insert (val
, result
);
2970 current_info
= optimistic_info
;
2973 vn_nary_op_insert (val
, result
);
2974 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2976 fprintf (dump_file
, "Inserting name ");
2977 print_generic_expr (dump_file
, result
, 0);
2978 fprintf (dump_file
, " for expression ");
2979 print_generic_expr (dump_file
, val
, 0);
2980 fprintf (dump_file
, "\n");
2987 changed
= set_ssa_val_to (lhs
, result
);
2988 if (TREE_CODE (result
) == SSA_NAME
2989 && VN_INFO (result
)->has_constants
)
2991 VN_INFO (lhs
)->expr
= VN_INFO (result
)->expr
;
2992 VN_INFO (lhs
)->has_constants
= true;
2997 changed
= set_ssa_val_to (lhs
, lhs
);
2998 vn_reference_insert (op
, lhs
, last_vuse
, NULL_TREE
);
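
/* Illustrative sketch (not part of the original sources): the
   VIEW_CONVERT_EXPR handling above covers type-punning loads such as

     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;

   where the load of u.f finds the store of i_1 by offset and size; the
   type mismatch is patched up by value numbering f_2 to
   VIEW_CONVERT_EXPR <float> (i_1), inserting a new SSA name for that
   expression if it is not available yet.  */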
3005 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3006 and return true if the value number of the LHS has changed as a result. */
3009 visit_reference_op_store (tree lhs
, tree op
, gimple stmt
)
3011 bool changed
= false;
3012 vn_reference_t vnresult
= NULL
;
3013 tree result
, assign
;
3014 bool resultsame
= false;
3015 tree vuse
= gimple_vuse (stmt
);
3016 tree vdef
= gimple_vdef (stmt
);
  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
3034 result
= vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, NULL
);
3038 if (TREE_CODE (result
) == SSA_NAME
)
3039 result
= SSA_VAL (result
);
3040 if (TREE_CODE (op
) == SSA_NAME
)
3042 resultsame
= expressions_equal_p (result
, op
);
3045 if (!result
|| !resultsame
)
3047 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3048 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
);
3051 VN_INFO (vdef
)->use_processed
= true;
3052 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3056 if (!result
|| !resultsame
)
3058 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3060 fprintf (dump_file
, "No store match\n");
3061 fprintf (dump_file
, "Value numbering store ");
3062 print_generic_expr (dump_file
, lhs
, 0);
3063 fprintf (dump_file
, " to ");
3064 print_generic_expr (dump_file
, op
, 0);
3065 fprintf (dump_file
, "\n");
3067 /* Have to set value numbers before insert, since insert is
3068 going to valueize the references in-place. */
3071 changed
|= set_ssa_val_to (vdef
, vdef
);
3074 /* Do not insert structure copies into the tables. */
3075 if (is_gimple_min_invariant (op
)
3076 || is_gimple_reg (op
))
3077 vn_reference_insert (lhs
, op
, vdef
, NULL
);
3079 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3080 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
3084 /* We had a match, so value number the vdef to have the value
3085 number of the vuse it came from. */
3087 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");
3091 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
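
/* Illustrative sketch (not part of the original sources): the store value
   numbering above makes the second store below redundant:

     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;
     # .MEM_5 = VDEF <.MEM_2>
     *p_3 = x_4;

   looking up *p_3 with vuse .MEM_2 finds the value x_4 stored by the
   first statement, so .MEM_5 is value numbered to .MEM_2 and the
   elimination phase of the SCCVN clients (FRE/PRE) can remove the
   duplicate store.  */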
3097 /* Visit and value number PHI, return true if the value number
3101 visit_phi (gimple phi
)
3103 bool changed
= false;
3105 tree sameval
= VN_TOP
;
3106 bool allsame
= true;
3108 /* TODO: We could check for this in init_sccvn, and replace this
3109 with a gcc_assert. */
3110 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
3111 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3113 /* See if all non-TOP arguments have the same value. TOP is
3114 equivalent to everything, so we can ignore it. */
3117 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3118 if (e
->flags
& EDGE_EXECUTABLE
)
3120 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3122 if (TREE_CODE (def
) == SSA_NAME
)
3123 def
= SSA_VAL (def
);
3126 if (sameval
== VN_TOP
)
3132 if (!expressions_equal_p (def
, sameval
))
3140 /* If all value numbered to the same value, the phi node has that
3144 if (is_gimple_min_invariant (sameval
))
3146 VN_INFO (PHI_RESULT (phi
))->has_constants
= true;
3147 if (sameval
!= VN_TOP
)
3148 VN_INFO (PHI_RESULT (phi
))->expr
= sameval
;
3152 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
3153 if (sameval
!= VN_TOP
)
3154 VN_INFO (PHI_RESULT (phi
))->expr
= sameval
;
3157 if (TREE_CODE (sameval
) == SSA_NAME
)
3158 return visit_copy (PHI_RESULT (phi
), sameval
);
3160 return set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3163 /* Otherwise, see if it is equivalent to a phi node in this block. */
3164 result
= vn_phi_lookup (phi
);
3167 if (TREE_CODE (result
) == SSA_NAME
)
3168 changed
= visit_copy (PHI_RESULT (phi
), result
);
3170 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
3174 vn_phi_insert (phi
, PHI_RESULT (phi
));
3175 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
3176 VN_INFO (PHI_RESULT (phi
))->expr
= PHI_RESULT (phi
);
3177 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3183 /* Return true if EXPR contains constants. */
3186 expr_has_constants (tree expr
)
3188 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
3191 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0));
3194 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0))
3195 || is_gimple_min_invariant (TREE_OPERAND (expr
, 1));
3196 /* Constants inside reference ops are rarely interesting, but
3197 it can take a lot of looking to find them. */
3199 case tcc_declaration
:
3202 return is_gimple_min_invariant (expr
);
3207 /* Return true if STMT contains constants. */
3210 stmt_has_constants (gimple stmt
)
3214 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
3217 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
)))
3219 case GIMPLE_TERNARY_RHS
:
3220 tem
= gimple_assign_rhs3 (stmt
);
3221 if (TREE_CODE (tem
) == SSA_NAME
)
3222 tem
= SSA_VAL (tem
);
3223 if (is_gimple_min_invariant (tem
))
3227 case GIMPLE_BINARY_RHS
:
3228 tem
= gimple_assign_rhs2 (stmt
);
3229 if (TREE_CODE (tem
) == SSA_NAME
)
3230 tem
= SSA_VAL (tem
);
3231 if (is_gimple_min_invariant (tem
))
3235 case GIMPLE_SINGLE_RHS
:
3236 /* Constants inside reference ops are rarely interesting, but
3237 it can take a lot of looking to find them. */
3238 case GIMPLE_UNARY_RHS
:
3239 tem
= gimple_assign_rhs1 (stmt
);
3240 if (TREE_CODE (tem
) == SSA_NAME
)
3241 tem
= SSA_VAL (tem
);
3242 return is_gimple_min_invariant (tem
);
3250 /* Simplify the binary expression RHS, and return the result if
3254 simplify_binary_expression (gimple stmt
)
3256 tree result
= NULL_TREE
;
3257 tree op0
= gimple_assign_rhs1 (stmt
);
3258 tree op1
= gimple_assign_rhs2 (stmt
);
3259 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3261 /* This will not catch every single case we could combine, but will
3262 catch those with constants. The goal here is to simultaneously
3263 combine constants between expressions, but avoid infinite
3264 expansion of expressions during simplification. */
3265 if (TREE_CODE (op0
) == SSA_NAME
)
3267 if (VN_INFO (op0
)->has_constants
3268 || TREE_CODE_CLASS (code
) == tcc_comparison
3269 || code
== COMPLEX_EXPR
)
3270 op0
= vn_get_expr_for (op0
);
3272 op0
= vn_valueize (op0
);
3275 if (TREE_CODE (op1
) == SSA_NAME
)
3277 if (VN_INFO (op1
)->has_constants
3278 || code
== COMPLEX_EXPR
)
3279 op1
= vn_get_expr_for (op1
);
3281 op1
= vn_valueize (op1
);
  /* Pointer plus constant can be represented as an invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
3286 if (code
== POINTER_PLUS_EXPR
3287 && tree_fits_uhwi_p (op1
)
3288 && TREE_CODE (op0
) == ADDR_EXPR
3289 && is_gimple_min_invariant (op0
))
3290 return build_invariant_address (TREE_TYPE (op0
),
3291 TREE_OPERAND (op0
, 0),
3292 tree_to_uhwi (op1
));
3294 /* Avoid folding if nothing changed. */
3295 if (op0
== gimple_assign_rhs1 (stmt
)
3296 && op1
== gimple_assign_rhs2 (stmt
))
3299 fold_defer_overflow_warnings ();
3301 result
= fold_binary (code
, gimple_expr_type (stmt
), op0
, op1
);
3303 STRIP_USELESS_TYPE_CONVERSION (result
);
3305 fold_undefer_overflow_warnings (result
&& valid_gimple_rhs_p (result
),
3308 /* Make sure result is not a complex expression consisting
3309 of operators of operators (IE (a + b) + (a + c))
3310 Otherwise, we will end up with unbounded expressions if
3311 fold does anything at all. */
3312 if (result
&& valid_gimple_rhs_p (result
))
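
/* Illustrative note (not from the original sources): the POINTER_PLUS_EXPR
   special case above turns e.g.

     p_1 = &a;
     q_2 = p_1 + 4;

   into an invariant address (conceptually &MEM[&a + 4B], or &a.b for a
   matching field), which later lookups and tree forwprop can propagate
   further, instead of leaving an SSA-level addition around.  */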
3318 /* Simplify the unary expression RHS, and return the result if
3322 simplify_unary_expression (gimple stmt
)
3324 tree result
= NULL_TREE
;
3325 tree orig_op0
, op0
= gimple_assign_rhs1 (stmt
);
3326 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3328 /* We handle some tcc_reference codes here that are all
3329 GIMPLE_ASSIGN_SINGLE codes. */
3330 if (code
== REALPART_EXPR
3331 || code
== IMAGPART_EXPR
3332 || code
== VIEW_CONVERT_EXPR
3333 || code
== BIT_FIELD_REF
)
3334 op0
= TREE_OPERAND (op0
, 0);
3336 if (TREE_CODE (op0
) != SSA_NAME
)
3340 if (VN_INFO (op0
)->has_constants
)
3341 op0
= vn_get_expr_for (op0
);
3342 else if (CONVERT_EXPR_CODE_P (code
)
3343 || code
== REALPART_EXPR
3344 || code
== IMAGPART_EXPR
3345 || code
== VIEW_CONVERT_EXPR
3346 || code
== BIT_FIELD_REF
)
3348 /* We want to do tree-combining on conversion-like expressions.
3349 Make sure we feed only SSA_NAMEs or constants to fold though. */
3350 tree tem
= vn_get_expr_for (op0
);
3351 if (UNARY_CLASS_P (tem
)
3352 || BINARY_CLASS_P (tem
)
3353 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
3354 || TREE_CODE (tem
) == SSA_NAME
3355 || TREE_CODE (tem
) == CONSTRUCTOR
3356 || is_gimple_min_invariant (tem
))
3360 /* Avoid folding if nothing changed, but remember the expression. */
3361 if (op0
== orig_op0
)
3364 if (code
== BIT_FIELD_REF
)
3366 tree rhs
= gimple_assign_rhs1 (stmt
);
3367 result
= fold_ternary (BIT_FIELD_REF
, TREE_TYPE (rhs
),
3368 op0
, TREE_OPERAND (rhs
, 1), TREE_OPERAND (rhs
, 2));
3371 result
= fold_unary_ignore_overflow (code
, gimple_expr_type (stmt
), op0
);
3374 STRIP_USELESS_TYPE_CONVERSION (result
);
3375 if (valid_gimple_rhs_p (result
))
3382 /* Try to simplify RHS using equivalences and constant folding. */
3385 try_to_simplify (gimple stmt
)
3387 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3390 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3391 in this case, there is no point in doing extra work. */
3392 if (code
== SSA_NAME
)
3395 /* First try constant folding based on our current lattice. */
3396 tem
= gimple_fold_stmt_to_constant_1 (stmt
, vn_valueize
);
3398 && (TREE_CODE (tem
) == SSA_NAME
3399 || is_gimple_min_invariant (tem
)))
3402 /* If that didn't work try combining multiple statements. */
3403 switch (TREE_CODE_CLASS (code
))
3406 /* Fallthrough for some unary codes that can operate on registers. */
3407 if (!(code
== REALPART_EXPR
3408 || code
== IMAGPART_EXPR
3409 || code
== VIEW_CONVERT_EXPR
3410 || code
== BIT_FIELD_REF
))
3412 /* We could do a little more with unary ops, if they expand
3413 into binary ops, but it's debatable whether it is worth it. */
3415 return simplify_unary_expression (stmt
);
3417 case tcc_comparison
:
3419 return simplify_binary_expression (stmt
);
3428 /* Visit and value number USE, return true if the value number
3432 visit_use (tree use
)
3434 bool changed
= false;
3435 gimple stmt
= SSA_NAME_DEF_STMT (use
);
3437 mark_use_processed (use
);
3439 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3440 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3441 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3443 fprintf (dump_file
, "Value numbering ");
3444 print_generic_expr (dump_file
, use
, 0);
3445 fprintf (dump_file
, " stmt = ");
3446 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3449 /* Handle uninitialized uses. */
3450 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3451 changed
= set_ssa_val_to (use
, use
);
3454 if (gimple_code (stmt
) == GIMPLE_PHI
)
3455 changed
= visit_phi (stmt
);
3456 else if (gimple_has_volatile_ops (stmt
))
3457 changed
= defs_to_varying (stmt
);
3458 else if (is_gimple_assign (stmt
))
3460 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3461 tree lhs
= gimple_assign_lhs (stmt
);
3462 tree rhs1
= gimple_assign_rhs1 (stmt
);
3465 /* Shortcut for copies. Simplifying copies is pointless,
3466 since we copy the expression and value they represent. */
3467 if (code
== SSA_NAME
3468 && TREE_CODE (lhs
) == SSA_NAME
)
3470 changed
= visit_copy (lhs
, rhs1
);
3473 simplified
= try_to_simplify (stmt
);
3476 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3478 fprintf (dump_file
, "RHS ");
3479 print_gimple_expr (dump_file
, stmt
, 0, 0);
3480 fprintf (dump_file
, " simplified to ");
3481 print_generic_expr (dump_file
, simplified
, 0);
3482 if (TREE_CODE (lhs
) == SSA_NAME
)
3483 fprintf (dump_file
, " has constants %d\n",
3484 expr_has_constants (simplified
));
3486 fprintf (dump_file
, "\n");
3489 /* Setting value numbers to constants will occasionally
3490 screw up phi congruence because constants are not
3491 uniquely associated with a single ssa name that can be
3494 && is_gimple_min_invariant (simplified
)
3495 && TREE_CODE (lhs
) == SSA_NAME
)
3497 VN_INFO (lhs
)->expr
= simplified
;
3498 VN_INFO (lhs
)->has_constants
= true;
3499 changed
= set_ssa_val_to (lhs
, simplified
);
3503 && TREE_CODE (simplified
) == SSA_NAME
3504 && TREE_CODE (lhs
) == SSA_NAME
)
3506 changed
= visit_copy (lhs
, simplified
);
3509 else if (simplified
)
3511 if (TREE_CODE (lhs
) == SSA_NAME
)
3513 VN_INFO (lhs
)->has_constants
= expr_has_constants (simplified
);
              /* We have to unshare the expression or else
                 valueizing may change the IL stream.  */
3516 VN_INFO (lhs
)->expr
= unshare_expr (simplified
);
3519 else if (stmt_has_constants (stmt
)
3520 && TREE_CODE (lhs
) == SSA_NAME
)
3521 VN_INFO (lhs
)->has_constants
= true;
3522 else if (TREE_CODE (lhs
) == SSA_NAME
)
3524 /* We reset expr and constantness here because we may
3525 have been value numbering optimistically, and
3526 iterating. They may become non-constant in this case,
3527 even if they were optimistically constant. */
3529 VN_INFO (lhs
)->has_constants
= false;
3530 VN_INFO (lhs
)->expr
= NULL_TREE
;
3533 if ((TREE_CODE (lhs
) == SSA_NAME
3534 /* We can substitute SSA_NAMEs that are live over
3535 abnormal edges with their constant value. */
3536 && !(gimple_assign_copy_p (stmt
)
3537 && is_gimple_min_invariant (rhs1
))
3539 && is_gimple_min_invariant (simplified
))
3540 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3541 /* Stores or copies from SSA_NAMEs that are live over
3542 abnormal edges are a problem. */
3543 || (code
== SSA_NAME
3544 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
3545 changed
= defs_to_varying (stmt
);
3546 else if (REFERENCE_CLASS_P (lhs
)
3548 changed
= visit_reference_op_store (lhs
, rhs1
, stmt
);
3549 else if (TREE_CODE (lhs
) == SSA_NAME
)
3551 if ((gimple_assign_copy_p (stmt
)
3552 && is_gimple_min_invariant (rhs1
))
3554 && is_gimple_min_invariant (simplified
)))
3556 VN_INFO (lhs
)->has_constants
= true;
3558 changed
= set_ssa_val_to (lhs
, simplified
);
3560 changed
= set_ssa_val_to (lhs
, rhs1
);
3564 /* First try to lookup the simplified expression. */
3567 enum gimple_rhs_class rhs_class
;
3570 rhs_class
= get_gimple_rhs_class (TREE_CODE (simplified
));
3571 if ((rhs_class
== GIMPLE_UNARY_RHS
3572 || rhs_class
== GIMPLE_BINARY_RHS
3573 || rhs_class
== GIMPLE_TERNARY_RHS
)
3574 && valid_gimple_rhs_p (simplified
))
3576 tree result
= vn_nary_op_lookup (simplified
, NULL
);
3579 changed
= set_ssa_val_to (lhs
, result
);
3585 /* Otherwise visit the original statement. */
3586 switch (vn_get_stmt_kind (stmt
))
3589 changed
= visit_nary_op (lhs
, stmt
);
3592 changed
= visit_reference_op_load (lhs
, rhs1
, stmt
);
3595 changed
= defs_to_varying (stmt
);
3601 changed
= defs_to_varying (stmt
);
3603 else if (is_gimple_call (stmt
))
3605 tree lhs
= gimple_call_lhs (stmt
);
3606 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3608 /* Try constant folding based on our current lattice. */
3609 tree simplified
= gimple_fold_stmt_to_constant_1 (stmt
,
3613 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3615 fprintf (dump_file
, "call ");
3616 print_gimple_expr (dump_file
, stmt
, 0, 0);
3617 fprintf (dump_file
, " simplified to ");
3618 print_generic_expr (dump_file
, simplified
, 0);
3619 if (TREE_CODE (lhs
) == SSA_NAME
)
3620 fprintf (dump_file
, " has constants %d\n",
3621 expr_has_constants (simplified
));
3623 fprintf (dump_file
, "\n");
3626 /* Setting value numbers to constants will occasionally
3627 screw up phi congruence because constants are not
3628 uniquely associated with a single ssa name that can be
3631 && is_gimple_min_invariant (simplified
))
3633 VN_INFO (lhs
)->expr
= simplified
;
3634 VN_INFO (lhs
)->has_constants
= true;
3635 changed
= set_ssa_val_to (lhs
, simplified
);
3636 if (gimple_vdef (stmt
))
3637 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3638 gimple_vuse (stmt
));
3642 && TREE_CODE (simplified
) == SSA_NAME
)
3644 changed
= visit_copy (lhs
, simplified
);
3645 if (gimple_vdef (stmt
))
3646 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3647 gimple_vuse (stmt
));
3652 if (stmt_has_constants (stmt
))
3653 VN_INFO (lhs
)->has_constants
= true;
3656 /* We reset expr and constantness here because we may
3657 have been value numbering optimistically, and
3658 iterating. They may become non-constant in this case,
3659 even if they were optimistically constant. */
3660 VN_INFO (lhs
)->has_constants
= false;
3661 VN_INFO (lhs
)->expr
= NULL_TREE
;
3664 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3666 changed
= defs_to_varying (stmt
);
3672 if (!gimple_call_internal_p (stmt
)
3673 && (/* Calls to the same function with the same vuse
3674 and the same operands do not necessarily return the same
3675 value, unless they're pure or const. */
3676 gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
)
             /* If calls have a vdef, subsequent calls won't have
                the same incoming vuse.  So, if two calls with vdef have the
                same vuse, we know they're not subsequent.
                We can value number two calls to the same function with the
                same vuse and the same operands, which are not subsequent,
                as the same, because there is no code in the program that can
                compare the two values...  */
3684 || (gimple_vdef (stmt
)
3685 /* ... unless the call returns a pointer which does
3686 not alias with anything else. In which case the
3687 information that the values are distinct are encoded
3689 && !(gimple_call_return_flags (stmt
) & ERF_NOALIAS
))))
3690 changed
= visit_reference_op_call (lhs
, stmt
);
3692 changed
= defs_to_varying (stmt
);
3695 changed
= defs_to_varying (stmt
);
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
3758 /* Insert the no longer used nary ONARY to the hash INFO. */
3761 copy_nary (vn_nary_op_t onary
, vn_tables_t info
)
3763 size_t size
= sizeof_vn_nary_op (onary
->length
);
3764 vn_nary_op_t nary
= alloc_vn_nary_op_noinit (onary
->length
,
3765 &info
->nary_obstack
);
3766 memcpy (nary
, onary
, size
);
3767 vn_nary_op_insert_into (nary
, info
->nary
, false);
3770 /* Insert the no longer used phi OPHI to the hash INFO. */
3773 copy_phi (vn_phi_t ophi
, vn_tables_t info
)
3775 vn_phi_t phi
= (vn_phi_t
) pool_alloc (info
->phis_pool
);
3777 memcpy (phi
, ophi
, sizeof (*phi
));
3778 ophi
->phiargs
.create (0);
3779 slot
= info
->phis
.find_slot_with_hash (phi
, phi
->hashcode
, INSERT
);
3780 gcc_assert (!*slot
);
3784 /* Insert the no longer used reference OREF to the hash INFO. */
3787 copy_reference (vn_reference_t oref
, vn_tables_t info
)
3790 vn_reference_s
**slot
;
3791 ref
= (vn_reference_t
) pool_alloc (info
->references_pool
);
3792 memcpy (ref
, oref
, sizeof (*ref
));
3793 oref
->operands
.create (0);
3794 slot
= info
->references
.find_slot_with_hash (ref
, ref
->hashcode
, INSERT
);
3796 free_reference (*slot
);
3800 /* Process a strongly connected component in the SSA graph. */
3803 process_scc (vec
<tree
> scc
)
3807 unsigned int iterations
= 0;
3808 bool changed
= true;
3809 vn_nary_op_iterator_type hin
;
3810 vn_phi_iterator_type hip
;
3811 vn_reference_iterator_type hir
;
3816 /* If the SCC has a single member, just visit it. */
3817 if (scc
.length () == 1)
3820 if (VN_INFO (use
)->use_processed
)
3822 /* We need to make sure it doesn't form a cycle itself, which can
3823 happen for self-referential PHI nodes. In that case we would
3824 end up inserting an expression with VN_TOP operands into the
3825 valid table which makes us derive bogus equivalences later.
3826 The cheapest way to check this is to assume it for all PHI nodes. */
3827 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
3828 /* Fallthru to iteration. */ ;
3836 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3837 print_scc (dump_file
, scc
);
3839 /* Iterate over the SCC with the optimistic table until it stops
3841 current_info
= optimistic_info
;
3846 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3847 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
3848 /* As we are value-numbering optimistically we have to
3849 clear the expression tables and the simplified expressions
3850 in each iteration until we converge. */
3851 optimistic_info
->nary
.empty ();
3852 optimistic_info
->phis
.empty ();
3853 optimistic_info
->references
.empty ();
3854 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
3855 gcc_obstack_init (&optimistic_info
->nary_obstack
);
3856 empty_alloc_pool (optimistic_info
->phis_pool
);
3857 empty_alloc_pool (optimistic_info
->references_pool
);
3858 FOR_EACH_VEC_ELT (scc
, i
, var
)
3859 VN_INFO (var
)->expr
= NULL_TREE
;
3860 FOR_EACH_VEC_ELT (scc
, i
, var
)
3861 changed
|= visit_use (var
);
3864 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3865 fprintf (dump_file
, "Processing SCC needed %d iterations\n", iterations
);
3866 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
3868 /* Finally, copy the contents of the no longer used optimistic
3869 table to the valid table. */
3870 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info
->nary
, nary
, vn_nary_op_t
, hin
)
3871 copy_nary (nary
, valid_info
);
3872 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info
->phis
, phi
, vn_phi_t
, hip
)
3873 copy_phi (phi
, valid_info
);
3874 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info
->references
,
3875 ref
, vn_reference_t
, hir
)
3876 copy_reference (ref
, valid_info
);
3878 current_info
= valid_info
;
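
/* Illustrative note (not from the original sources): a self-referential
   cycle such as

     # i_1 = PHI <0(entry), i_3(latch)>
     i_3 = i_1 + 0;

   forms a multi-member SCC.  The optimistic table starts with i_1 and i_3
   at VN_TOP; the first iteration tentatively values both to 0, the second
   iteration confirms nothing changes, and the converged entries are then
   copied into the valid table by the loop above.  */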
3882 /* Pop the components of the found SCC for NAME off the SCC stack
3883 and process them. Returns true if all went well, false if
3884 we run into resource limits. */
3887 extract_and_process_scc_for_name (tree name
)
  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
3896 x
= sccstack
.pop ();
3898 VN_INFO (x
)->on_sccstack
= false;
3900 } while (x
!= name
);
3902 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3904 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
3907 fprintf (dump_file
, "WARNING: Giving up with SCCVN due to "
3908 "SCC size %u exceeding %u\n", scc
.length (),
3909 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
));
3914 if (scc
.length () > 1)
3922 /* Depth first search on NAME to discover and process SCC's in the SSA
3924 Execution of this algorithm relies on the fact that the SCC's are
3925 popped off the stack in topological order.
3926 Returns true if successful, false if we stopped processing SCC's due
3927 to resource constraints. */
3932 vec
<ssa_op_iter
> itervec
= vNULL
;
3933 vec
<tree
> namevec
= vNULL
;
3934 use_operand_p usep
= NULL
;
3941 VN_INFO (name
)->dfsnum
= next_dfs_num
++;
3942 VN_INFO (name
)->visited
= true;
3943 VN_INFO (name
)->low
= VN_INFO (name
)->dfsnum
;
3945 sccstack
.safe_push (name
);
3946 VN_INFO (name
)->on_sccstack
= true;
3947 defstmt
= SSA_NAME_DEF_STMT (name
);
3949 /* Recursively DFS on our operands, looking for SCC's. */
3950 if (!gimple_nop_p (defstmt
))
3952 /* Push a new iterator. */
3953 if (gimple_code (defstmt
) == GIMPLE_PHI
)
3954 usep
= op_iter_init_phiuse (&iter
, defstmt
, SSA_OP_ALL_USES
);
3956 usep
= op_iter_init_use (&iter
, defstmt
, SSA_OP_ALL_USES
);
3959 clear_and_done_ssa_iter (&iter
);
3963 /* If we are done processing uses of a name, go up the stack
3964 of iterators and process SCCs as we found them. */
3965 if (op_iter_done (&iter
))
3967 /* See if we found an SCC. */
3968 if (VN_INFO (name
)->low
== VN_INFO (name
)->dfsnum
)
3969 if (!extract_and_process_scc_for_name (name
))
3976 /* Check if we are done. */
3977 if (namevec
.is_empty ())
3984 /* Restore the last use walker and continue walking there. */
3986 name
= namevec
.pop ();
3987 memcpy (&iter
, &itervec
.last (),
3988 sizeof (ssa_op_iter
));
3990 goto continue_walking
;
3993 use
= USE_FROM_PTR (usep
);
3995 /* Since we handle phi nodes, we will sometimes get
3996 invariants in the use expression. */
3997 if (TREE_CODE (use
) == SSA_NAME
)
3999 if (! (VN_INFO (use
)->visited
))
4001 /* Recurse by pushing the current use walking state on
4002 the stack and starting over. */
4003 itervec
.safe_push (iter
);
4004 namevec
.safe_push (name
);
4009 VN_INFO (name
)->low
= MIN (VN_INFO (name
)->low
,
4010 VN_INFO (use
)->low
);
4012 if (VN_INFO (use
)->dfsnum
< VN_INFO (name
)->dfsnum
4013 && VN_INFO (use
)->on_sccstack
)
4015 VN_INFO (name
)->low
= MIN (VN_INFO (use
)->dfsnum
,
4016 VN_INFO (name
)->low
);
4020 usep
= op_iter_next_use (&iter
);
4024 /* Allocate a value number table. */
4027 allocate_vn_table (vn_tables_t table
)
4029 table
->phis
.create (23);
4030 table
->nary
.create (23);
4031 table
->references
.create (23);
4033 gcc_obstack_init (&table
->nary_obstack
);
4034 table
->phis_pool
= create_alloc_pool ("VN phis",
4035 sizeof (struct vn_phi_s
),
4037 table
->references_pool
= create_alloc_pool ("VN references",
4038 sizeof (struct vn_reference_s
),
4042 /* Free a value number table. */
4045 free_vn_table (vn_tables_t table
)
4047 table
->phis
.dispose ();
4048 table
->nary
.dispose ();
4049 table
->references
.dispose ();
4050 obstack_free (&table
->nary_obstack
, NULL
);
4051 free_alloc_pool (table
->phis_pool
);
4052 free_alloc_pool (table
->references_pool
);
4060 int *rpo_numbers_temp
;
4062 calculate_dominance_info (CDI_DOMINATORS
);
4063 sccstack
.create (0);
4064 constant_to_value_id
.create (23);
4066 constant_value_ids
= BITMAP_ALLOC (NULL
);
4071 vn_ssa_aux_table
.create (num_ssa_names
+ 1);
4072 /* VEC_alloc doesn't actually grow it to the right size, it just
4073 preallocates the space to do so. */
4074 vn_ssa_aux_table
.safe_grow_cleared (num_ssa_names
+ 1);
4075 gcc_obstack_init (&vn_ssa_aux_obstack
);
4077 shared_lookup_phiargs
.create (0);
4078 shared_lookup_references
.create (0);
4079 rpo_numbers
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
4081 XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
4082 pre_and_rev_post_order_compute (NULL
, rpo_numbers_temp
, false);
4084 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4085 the i'th block in RPO order is bb. We want to map bb's to RPO
4086 numbers, so we need to rearrange this array. */
4087 for (j
= 0; j
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; j
++)
4088 rpo_numbers
[rpo_numbers_temp
[j
]] = j
;
4090 XDELETE (rpo_numbers_temp
);
4092 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
4094 /* Create the VN_INFO structures, and initialize value numbers to
4096 for (i
= 0; i
< num_ssa_names
; i
++)
4098 tree name
= ssa_name (i
);
4101 VN_INFO_GET (name
)->valnum
= VN_TOP
;
4102 VN_INFO (name
)->expr
= NULL_TREE
;
4103 VN_INFO (name
)->value_id
= 0;
4107 renumber_gimple_stmt_uids ();
4109 /* Create the valid and optimistic value numbering tables. */
4110 valid_info
= XCNEW (struct vn_tables_s
);
4111 allocate_vn_table (valid_info
);
4112 optimistic_info
= XCNEW (struct vn_tables_s
);
4113 allocate_vn_table (optimistic_info
);
4121 constant_to_value_id
.dispose ();
4122 BITMAP_FREE (constant_value_ids
);
4123 shared_lookup_phiargs
.release ();
4124 shared_lookup_references
.release ();
4125 XDELETEVEC (rpo_numbers
);
4127 for (i
= 0; i
< num_ssa_names
; i
++)
4129 tree name
= ssa_name (i
);
4131 && VN_INFO (name
)->needs_insertion
)
4132 release_ssa_name (name
);
4134 obstack_free (&vn_ssa_aux_obstack
, NULL
);
4135 vn_ssa_aux_table
.release ();
4137 sccstack
.release ();
4138 free_vn_table (valid_info
);
4139 XDELETE (valid_info
);
4140 free_vn_table (optimistic_info
);
4141 XDELETE (optimistic_info
);
4144 /* Set *ID according to RESULT. */
4147 set_value_id_for_result (tree result
, unsigned int *id
)
4149 if (result
&& TREE_CODE (result
) == SSA_NAME
)
4150 *id
= VN_INFO (result
)->value_id
;
4151 else if (result
&& is_gimple_min_invariant (result
))
4152 *id
= get_or_alloc_constant_value_id (result
);
4154 *id
= get_next_value_id ();
4157 /* Set the value ids in the valid hash tables. */
4160 set_hashtable_value_ids (void)
4162 vn_nary_op_iterator_type hin
;
4163 vn_phi_iterator_type hip
;
4164 vn_reference_iterator_type hir
;
4169 /* Now set the value ids of the things we had put in the hash
4172 FOR_EACH_HASH_TABLE_ELEMENT (valid_info
->nary
, vno
, vn_nary_op_t
, hin
)
4173 set_value_id_for_result (vno
->result
, &vno
->value_id
);
4175 FOR_EACH_HASH_TABLE_ELEMENT (valid_info
->phis
, vp
, vn_phi_t
, hip
)
4176 set_value_id_for_result (vp
->result
, &vp
->value_id
);
4178 FOR_EACH_HASH_TABLE_ELEMENT (valid_info
->references
, vr
, vn_reference_t
, hir
)
4179 set_value_id_for_result (vr
->result
, &vr
->value_id
);
4182 class cond_dom_walker
: public dom_walker
4185 cond_dom_walker () : dom_walker (CDI_DOMINATORS
), fail (false) {}
4187 virtual void before_dom_children (basic_block
);
4193 cond_dom_walker::before_dom_children (basic_block bb
)
4201 /* If any of the predecessor edges that do not come from blocks dominated
4202 by us are still marked as possibly executable consider this block
4204 bool reachable
= bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
);
4205 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4206 if (!dominated_by_p (CDI_DOMINATORS
, e
->src
, bb
))
4207 reachable
|= (e
->flags
& EDGE_EXECUTABLE
);
4209 /* If the block is not reachable all outgoing edges are not
4213 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4214 fprintf (dump_file
, "Marking all outgoing edges of unreachable "
4215 "BB %d as not executable\n", bb
->index
);
4217 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4218 e
->flags
&= ~EDGE_EXECUTABLE
;
4222 gimple stmt
= last_stmt (bb
);
4226 enum gimple_code code
= gimple_code (stmt
);
4227 if (code
!= GIMPLE_COND
4228 && code
!= GIMPLE_SWITCH
4229 && code
!= GIMPLE_GOTO
)
4232 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4234 fprintf (dump_file
, "Value-numbering operands of stmt ending BB %d: ",
4236 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4239 /* Value-number the last stmts SSA uses. */
4242 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, i
, SSA_OP_USE
)
4243 if (VN_INFO (op
)->visited
== false
4250 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4251 if value-numbering can prove they are not reachable. Handling
4252 computed gotos is also possible. */
4258 tree lhs
= gimple_cond_lhs (stmt
);
4259 tree rhs
= gimple_cond_rhs (stmt
);
4260 /* Work hard in computing the condition and take into account
4261 the valueization of the defining stmt. */
4262 if (TREE_CODE (lhs
) == SSA_NAME
)
4263 lhs
= vn_get_expr_for (lhs
);
4264 if (TREE_CODE (rhs
) == SSA_NAME
)
4265 rhs
= vn_get_expr_for (rhs
);
4266 val
= fold_binary (gimple_cond_code (stmt
),
4267 boolean_type_node
, lhs
, rhs
);
4271 val
= gimple_switch_index (stmt
);
4274 val
= gimple_goto_dest (stmt
);
4282 edge taken
= find_taken_edge (bb
, vn_valueize (val
));
4286 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4287 fprintf (dump_file
, "Marking all edges out of BB %d but (%d -> %d) as "
4288 "not executable\n", bb
->index
, bb
->index
, taken
->dest
->index
);
4290 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4292 e
->flags
&= ~EDGE_EXECUTABLE
;
4295 /* Do SCCVN. Returns true if it finished, false if we bailed out
4296 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4297 how we use the alias oracle walking during the VN process. */
4300 run_scc_vn (vn_lookup_kind default_vn_walk_kind_
)
4306 default_vn_walk_kind
= default_vn_walk_kind_
;
4309 current_info
= valid_info
;
4311 for (param
= DECL_ARGUMENTS (current_function_decl
);
4313 param
= DECL_CHAIN (param
))
4315 tree def
= ssa_default_def (cfun
, param
);
4318 VN_INFO (def
)->visited
= true;
4319 VN_INFO (def
)->valnum
= def
;
4323 /* Mark all edges as possibly executable. */
4324 FOR_ALL_BB_FN (bb
, cfun
)
4328 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4329 e
->flags
|= EDGE_EXECUTABLE
;
4332 /* Walk all blocks in dominator order, value-numbering the last stmts
4333 SSA uses and decide whether outgoing edges are not executable. */
4334 cond_dom_walker walker
;
4335 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
4342 /* Value-number remaining SSA names. */
4343 for (i
= 1; i
< num_ssa_names
; ++i
)
4345 tree name
= ssa_name (i
);
4347 && VN_INFO (name
)->visited
== false
4348 && !has_zero_uses (name
))
4356 /* Initialize the value ids. */
4358 for (i
= 1; i
< num_ssa_names
; ++i
)
4360 tree name
= ssa_name (i
);
4364 info
= VN_INFO (name
);
4365 if (info
->valnum
== name
4366 || info
->valnum
== VN_TOP
)
4367 info
->value_id
= get_next_value_id ();
4368 else if (is_gimple_min_invariant (info
->valnum
))
4369 info
->value_id
= get_or_alloc_constant_value_id (info
->valnum
);
4373 for (i
= 1; i
< num_ssa_names
; ++i
)
4375 tree name
= ssa_name (i
);
4379 info
= VN_INFO (name
);
4380 if (TREE_CODE (info
->valnum
) == SSA_NAME
4381 && info
->valnum
!= name
4382 && info
->value_id
!= VN_INFO (info
->valnum
)->value_id
)
4383 info
->value_id
= VN_INFO (info
->valnum
)->value_id
;
4386 set_hashtable_value_ids ();
4388 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4390 fprintf (dump_file
, "Value numbers:\n");
4391 for (i
= 0; i
< num_ssa_names
; i
++)
4393 tree name
= ssa_name (i
);
4395 && VN_INFO (name
)->visited
4396 && SSA_VAL (name
) != name
)
4398 print_generic_expr (dump_file
, name
, 0);
4399 fprintf (dump_file
, " = ");
4400 print_generic_expr (dump_file
, SSA_VAL (name
), 0);
4401 fprintf (dump_file
, "\n");
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
4448 /* Return true if the nary operation NARY may trap. This is a copy
4449 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4452 vn_nary_may_trap (vn_nary_op_t nary
)
4455 tree rhs2
= NULL_TREE
;
4456 bool honor_nans
= false;
4457 bool honor_snans
= false;
4458 bool fp_operation
= false;
4459 bool honor_trapv
= false;
4463 if (TREE_CODE_CLASS (nary
->opcode
) == tcc_comparison
4464 || TREE_CODE_CLASS (nary
->opcode
) == tcc_unary
4465 || TREE_CODE_CLASS (nary
->opcode
) == tcc_binary
)
4468 fp_operation
= FLOAT_TYPE_P (type
);
4471 honor_nans
= flag_trapping_math
&& !flag_finite_math_only
;
4472 honor_snans
= flag_signaling_nans
!= 0;
4474 else if (INTEGRAL_TYPE_P (type
)
4475 && TYPE_OVERFLOW_TRAPS (type
))
4478 if (nary
->length
>= 2)
4480 ret
= operation_could_trap_helper_p (nary
->opcode
, fp_operation
,
4482 honor_nans
, honor_snans
, rhs2
,
4488 for (i
= 0; i
< nary
->length
; ++i
)
4489 if (tree_could_trap_p (nary
->op
[i
]))