1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "double-int.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "hard-reg-set.h"
41 #include "dominance.h"
44 #include "basic-block.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-inline.h"
47 #include "hash-table.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
51 #include "gimple-fold.h"
53 #include "gimple-expr.h"
57 #include "gimple-ssa.h"
58 #include "tree-phinodes.h"
59 #include "ssa-iterators.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
66 #include "alloc-pool.h"
70 #include "tree-ssa-propagate.h"
71 #include "tree-ssa-sccvn.h"
75 #include "plugin-api.h"
78 /* This algorithm is based on the SCC algorithm presented by Keith
79 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
80 (http://citeseer.ist.psu.edu/41805.html). In
81 straight line code, it is equivalent to a regular hash based value
82 numbering that is performed in reverse postorder.
84 For code with cycles, there are two alternatives, both of which
85 require keeping the hashtables separate from the actual list of
86 value numbers for SSA names.
88 1. Iterate value numbering in an RPO walk of the blocks, removing
89 all the entries from the hashtable after each iteration (but
90 keeping the SSA name->value number mapping between iterations).
91 Iterate until it does not change.
93 2. Perform value numbering as part of an SCC walk on the SSA graph,
94 iterating only the cycles in the SSA graph until they do not change
95 (using a separate, optimistic hashtable for value numbering the SCC
98 The second is not just faster in practice (because most SSA graph
99 cycles do not involve all the variables in the graph), it also has
100 some nice properties.
102 One of these nice properties is that when we pop an SCC off the
103 stack, we are guaranteed to have processed all the operands coming from
104 *outside of that SCC*, so we do not need to do anything special to
105 ensure they have value numbers.
107 Another nice property is that the SCC walk is done as part of a DFS
108 of the SSA graph, which makes it easy to perform combining and
109 simplifying operations at the same time.
111 The code below is deliberately written in a way that makes it easy
112 to separate the SCC walk from the other work it does.
114 In order to propagate constants through the code, we track which
115 expressions contain constants, and use those while folding. In
116 theory, we could also track expressions whose value numbers are
117 replaced, in case we end up folding based on expression
120 In order to value number memory, we assign value numbers to vuses.
121 This enables us to note that, for example, stores to the same
122 address of the same value from the same starting memory states are
126 1. We can iterate only the changing portions of the SCC's, but
127 I have not seen an SCC big enough for this to be a win.
128 2. If you differentiate between phi nodes for loops and phi nodes
129 for if-then-else, you can properly consider phi nodes in different
130 blocks for equivalence.
131 3. We could value number vuses in more cases, particularly, whole
136 /* vn_nary_op hashtable helpers. */
138 struct vn_nary_op_hasher
: typed_noop_remove
<vn_nary_op_s
>
140 typedef vn_nary_op_s value_type
;
141 typedef vn_nary_op_s compare_type
;
142 static inline hashval_t
hash (const value_type
*);
143 static inline bool equal (const value_type
*, const compare_type
*);
146 /* Return the computed hashcode for nary operation P1. */
149 vn_nary_op_hasher::hash (const value_type
*vno1
)
151 return vno1
->hashcode
;
154 /* Compare nary operations P1 and P2 and return true if they are
158 vn_nary_op_hasher::equal (const value_type
*vno1
, const compare_type
*vno2
)
160 return vn_nary_op_eq (vno1
, vno2
);
163 typedef hash_table
<vn_nary_op_hasher
> vn_nary_op_table_type
;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type
;
167 /* vn_phi hashtable helpers. */
170 vn_phi_eq (const_vn_phi_t
const vp1
, const_vn_phi_t
const vp2
);
174 typedef vn_phi_s value_type
;
175 typedef vn_phi_s compare_type
;
176 static inline hashval_t
hash (const value_type
*);
177 static inline bool equal (const value_type
*, const compare_type
*);
178 static inline void remove (value_type
*);
181 /* Return the computed hashcode for phi operation P1. */
184 vn_phi_hasher::hash (const value_type
*vp1
)
186 return vp1
->hashcode
;
189 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
192 vn_phi_hasher::equal (const value_type
*vp1
, const compare_type
*vp2
)
194 return vn_phi_eq (vp1
, vp2
);
197 /* Free a phi operation structure VP. */
200 vn_phi_hasher::remove (value_type
*phi
)
202 phi
->phiargs
.release ();
205 typedef hash_table
<vn_phi_hasher
> vn_phi_table_type
;
206 typedef vn_phi_table_type::iterator vn_phi_iterator_type
;
209 /* Compare two reference operands P1 and P2 for equality. Return true if
210 they are equal, and false otherwise. */
213 vn_reference_op_eq (const void *p1
, const void *p2
)
215 const_vn_reference_op_t
const vro1
= (const_vn_reference_op_t
) p1
;
216 const_vn_reference_op_t
const vro2
= (const_vn_reference_op_t
) p2
;
218 return (vro1
->opcode
== vro2
->opcode
219 /* We do not care for differences in type qualification. */
220 && (vro1
->type
== vro2
->type
221 || (vro1
->type
&& vro2
->type
222 && types_compatible_p (TYPE_MAIN_VARIANT (vro1
->type
),
223 TYPE_MAIN_VARIANT (vro2
->type
))))
224 && expressions_equal_p (vro1
->op0
, vro2
->op0
)
225 && expressions_equal_p (vro1
->op1
, vro2
->op1
)
226 && expressions_equal_p (vro1
->op2
, vro2
->op2
));
229 /* Free a reference operation structure VP. */
232 free_reference (vn_reference_s
*vr
)
234 vr
->operands
.release ();
238 /* vn_reference hashtable helpers. */
240 struct vn_reference_hasher
242 typedef vn_reference_s value_type
;
243 typedef vn_reference_s compare_type
;
244 static inline hashval_t
hash (const value_type
*);
245 static inline bool equal (const value_type
*, const compare_type
*);
246 static inline void remove (value_type
*);
249 /* Return the hashcode for a given reference operation P1. */
252 vn_reference_hasher::hash (const value_type
*vr1
)
254 return vr1
->hashcode
;
258 vn_reference_hasher::equal (const value_type
*v
, const compare_type
*c
)
260 return vn_reference_eq (v
, c
);
264 vn_reference_hasher::remove (value_type
*v
)
269 typedef hash_table
<vn_reference_hasher
> vn_reference_table_type
;
270 typedef vn_reference_table_type::iterator vn_reference_iterator_type
;
273 /* The set of hashtables and alloc_pool's for their items. */
275 typedef struct vn_tables_s
277 vn_nary_op_table_type
*nary
;
278 vn_phi_table_type
*phis
;
279 vn_reference_table_type
*references
;
280 struct obstack nary_obstack
;
281 alloc_pool phis_pool
;
282 alloc_pool references_pool
;
286 /* vn_constant hashtable helpers. */
288 struct vn_constant_hasher
: typed_free_remove
<vn_constant_s
>
290 typedef vn_constant_s value_type
;
291 typedef vn_constant_s compare_type
;
292 static inline hashval_t
hash (const value_type
*);
293 static inline bool equal (const value_type
*, const compare_type
*);
296 /* Hash table hash function for vn_constant_t. */
299 vn_constant_hasher::hash (const value_type
*vc1
)
301 return vc1
->hashcode
;
304 /* Hash table equality function for vn_constant_t. */
307 vn_constant_hasher::equal (const value_type
*vc1
, const compare_type
*vc2
)
309 if (vc1
->hashcode
!= vc2
->hashcode
)
312 return vn_constant_eq_with_type (vc1
->constant
, vc2
->constant
);
315 static hash_table
<vn_constant_hasher
> *constant_to_value_id
;
316 static bitmap constant_value_ids
;
319 /* Valid hashtables storing information we have proven to be
322 static vn_tables_t valid_info
;
324 /* Optimistic hashtables storing information we are making assumptions about
325 during iterations. */
327 static vn_tables_t optimistic_info
;
329 /* Pointer to the set of hashtables that is currently being used.
330 Should always point to either the optimistic_info, or the
333 static vn_tables_t current_info
;
336 /* Reverse post order index for each basic block. */
338 static int *rpo_numbers
;
340 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
342 /* Return the SSA value of the VUSE x, supporting released VDEFs
343 during elimination which will value-number the VDEF to the
344 associated VUSE (but not substitute in the whole lattice). */
347 vuse_ssa_val (tree x
)
356 while (SSA_NAME_IN_FREE_LIST (x
));
361 /* This represents the top of the VN lattice, which is the universal
366 /* Unique counter for our value ids. */
368 static unsigned int next_value_id
;
370 /* Next DFS number and the stack for strongly connected component
373 static unsigned int next_dfs_num
;
374 static vec
<tree
> sccstack
;
378 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
379 are allocated on an obstack for locality reasons, and to free them
380 without looping over the vec. */
382 static vec
<vn_ssa_aux_t
> vn_ssa_aux_table
;
383 static struct obstack vn_ssa_aux_obstack
;
385 /* Return the value numbering information for a given SSA name. */
390 vn_ssa_aux_t res
= vn_ssa_aux_table
[SSA_NAME_VERSION (name
)];
391 gcc_checking_assert (res
);
395 /* Set the value numbering info for a given SSA name to a given
399 VN_INFO_SET (tree name
, vn_ssa_aux_t value
)
401 vn_ssa_aux_table
[SSA_NAME_VERSION (name
)] = value
;
404 /* Initialize the value numbering info for a given SSA name.
405 This should be called just once for every SSA name. */
408 VN_INFO_GET (tree name
)
410 vn_ssa_aux_t newinfo
;
412 newinfo
= XOBNEW (&vn_ssa_aux_obstack
, struct vn_ssa_aux
);
413 memset (newinfo
, 0, sizeof (struct vn_ssa_aux
));
414 if (SSA_NAME_VERSION (name
) >= vn_ssa_aux_table
.length ())
415 vn_ssa_aux_table
.safe_grow (SSA_NAME_VERSION (name
) + 1);
416 vn_ssa_aux_table
[SSA_NAME_VERSION (name
)] = newinfo
;
421 /* Get the representative expression for the SSA_NAME NAME. Returns
422 the representative SSA_NAME if there is no expression associated with it. */
425 vn_get_expr_for (tree name
)
427 vn_ssa_aux_t vn
= VN_INFO (name
);
429 tree expr
= NULL_TREE
;
432 if (vn
->valnum
== VN_TOP
)
435 /* If the value-number is a constant it is the representative
437 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
440 /* Get to the information of the value of this SSA_NAME. */
441 vn
= VN_INFO (vn
->valnum
);
443 /* If the value-number is a constant it is the representative
445 if (TREE_CODE (vn
->valnum
) != SSA_NAME
)
448 /* Else if we have an expression, return it. */
449 if (vn
->expr
!= NULL_TREE
)
452 /* Otherwise use the defining statement to build the expression. */
453 def_stmt
= SSA_NAME_DEF_STMT (vn
->valnum
);
455 /* If the value number is not an assignment use it directly. */
456 if (!is_gimple_assign (def_stmt
))
459 /* Note that we can valueize here because we clear the cached
460 simplified expressions after each optimistic iteration. */
461 code
= gimple_assign_rhs_code (def_stmt
);
462 switch (TREE_CODE_CLASS (code
))
465 if ((code
== REALPART_EXPR
466 || code
== IMAGPART_EXPR
467 || code
== VIEW_CONVERT_EXPR
)
468 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt
),
470 expr
= fold_build1 (code
,
471 gimple_expr_type (def_stmt
),
472 vn_valueize (TREE_OPERAND
473 (gimple_assign_rhs1 (def_stmt
), 0)));
477 expr
= fold_build1 (code
,
478 gimple_expr_type (def_stmt
),
479 vn_valueize (gimple_assign_rhs1 (def_stmt
)));
483 expr
= fold_build2 (code
,
484 gimple_expr_type (def_stmt
),
485 vn_valueize (gimple_assign_rhs1 (def_stmt
)),
486 vn_valueize (gimple_assign_rhs2 (def_stmt
)));
489 case tcc_exceptional
:
490 if (code
== CONSTRUCTOR
492 (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))) == VECTOR_TYPE
)
493 expr
= gimple_assign_rhs1 (def_stmt
);
498 if (expr
== NULL_TREE
)
501 /* Cache the expression. */
507 /* Return the vn_kind the expression computed by the stmt should be
511 vn_get_stmt_kind (gimple stmt
)
513 switch (gimple_code (stmt
))
521 enum tree_code code
= gimple_assign_rhs_code (stmt
);
522 tree rhs1
= gimple_assign_rhs1 (stmt
);
523 switch (get_gimple_rhs_class (code
))
525 case GIMPLE_UNARY_RHS
:
526 case GIMPLE_BINARY_RHS
:
527 case GIMPLE_TERNARY_RHS
:
529 case GIMPLE_SINGLE_RHS
:
530 switch (TREE_CODE_CLASS (code
))
533 /* VOP-less references can go through unary case. */
534 if ((code
== REALPART_EXPR
535 || code
== IMAGPART_EXPR
536 || code
== VIEW_CONVERT_EXPR
537 || code
== BIT_FIELD_REF
)
538 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == SSA_NAME
)
542 case tcc_declaration
:
549 if (code
== ADDR_EXPR
)
550 return (is_gimple_min_invariant (rhs1
)
551 ? VN_CONSTANT
: VN_REFERENCE
);
552 else if (code
== CONSTRUCTOR
)
565 /* Lookup a value id for CONSTANT and return it. If it does not
569 get_constant_value_id (tree constant
)
571 vn_constant_s
**slot
;
572 struct vn_constant_s vc
;
574 vc
.hashcode
= vn_hash_constant_with_type (constant
);
575 vc
.constant
= constant
;
576 slot
= constant_to_value_id
->find_slot (&vc
, NO_INSERT
);
578 return (*slot
)->value_id
;
582 /* Lookup a value id for CONSTANT, and if it does not exist, create a
583 new one and return it. If it does exist, return it. */
586 get_or_alloc_constant_value_id (tree constant
)
588 vn_constant_s
**slot
;
589 struct vn_constant_s vc
;
592 vc
.hashcode
= vn_hash_constant_with_type (constant
);
593 vc
.constant
= constant
;
594 slot
= constant_to_value_id
->find_slot (&vc
, INSERT
);
596 return (*slot
)->value_id
;
598 vcp
= XNEW (struct vn_constant_s
);
599 vcp
->hashcode
= vc
.hashcode
;
600 vcp
->constant
= constant
;
601 vcp
->value_id
= get_next_value_id ();
603 bitmap_set_bit (constant_value_ids
, vcp
->value_id
);
604 return vcp
->value_id
;
607 /* Return true if V is a value id for a constant. */
610 value_id_constant_p (unsigned int v
)
612 return bitmap_bit_p (constant_value_ids
, v
);
615 /* Compute the hash for a reference operand VRO1. */
618 vn_reference_op_compute_hash (const vn_reference_op_t vro1
, inchash::hash
&hstate
)
620 hstate
.add_int (vro1
->opcode
);
622 inchash::add_expr (vro1
->op0
, hstate
);
624 inchash::add_expr (vro1
->op1
, hstate
);
626 inchash::add_expr (vro1
->op2
, hstate
);
629 /* Compute a hash for the reference operation VR1 and return it. */
632 vn_reference_compute_hash (const vn_reference_t vr1
)
634 inchash::hash hstate
;
637 vn_reference_op_t vro
;
638 HOST_WIDE_INT off
= -1;
641 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
643 if (vro
->opcode
== MEM_REF
)
645 else if (vro
->opcode
!= ADDR_EXPR
)
657 hstate
.add_int (off
);
660 && vro
->opcode
== ADDR_EXPR
)
664 tree op
= TREE_OPERAND (vro
->op0
, 0);
665 hstate
.add_int (TREE_CODE (op
));
666 inchash::add_expr (op
, hstate
);
670 vn_reference_op_compute_hash (vro
, hstate
);
673 result
= hstate
.end ();
674 /* ??? We would ICE later if we hash instead of adding that in. */
676 result
+= SSA_NAME_VERSION (vr1
->vuse
);
681 /* Return true if reference operations VR1 and VR2 are equivalent. This
682 means they have the same set of operands and vuses. */
685 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
689 /* Early out if this is not a hash collision. */
690 if (vr1
->hashcode
!= vr2
->hashcode
)
693 /* The VOP needs to be the same. */
694 if (vr1
->vuse
!= vr2
->vuse
)
697 /* If the operands are the same we are done. */
698 if (vr1
->operands
== vr2
->operands
)
701 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
704 if (INTEGRAL_TYPE_P (vr1
->type
)
705 && INTEGRAL_TYPE_P (vr2
->type
))
707 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
710 else if (INTEGRAL_TYPE_P (vr1
->type
)
711 && (TYPE_PRECISION (vr1
->type
)
712 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
714 else if (INTEGRAL_TYPE_P (vr2
->type
)
715 && (TYPE_PRECISION (vr2
->type
)
716 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
723 HOST_WIDE_INT off1
= 0, off2
= 0;
724 vn_reference_op_t vro1
, vro2
;
725 vn_reference_op_s tem1
, tem2
;
726 bool deref1
= false, deref2
= false;
727 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
729 if (vro1
->opcode
== MEM_REF
)
735 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
737 if (vro2
->opcode
== MEM_REF
)
745 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
747 memset (&tem1
, 0, sizeof (tem1
));
748 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
749 tem1
.type
= TREE_TYPE (tem1
.op0
);
750 tem1
.opcode
= TREE_CODE (tem1
.op0
);
754 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
756 memset (&tem2
, 0, sizeof (tem2
));
757 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
758 tem2
.type
= TREE_TYPE (tem2
.op0
);
759 tem2
.opcode
= TREE_CODE (tem2
.op0
);
763 if (deref1
!= deref2
)
765 if (!vn_reference_op_eq (vro1
, vro2
))
770 while (vr1
->operands
.length () != i
771 || vr2
->operands
.length () != j
);
776 /* Copy the operations present in load/store REF into RESULT, a vector of
777 vn_reference_op_s's. */
780 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
782 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
784 vn_reference_op_s temp
;
788 memset (&temp
, 0, sizeof (temp
));
789 temp
.type
= TREE_TYPE (ref
);
790 temp
.opcode
= TREE_CODE (ref
);
791 temp
.op0
= TMR_INDEX (ref
);
792 temp
.op1
= TMR_STEP (ref
);
793 temp
.op2
= TMR_OFFSET (ref
);
795 result
->quick_push (temp
);
797 memset (&temp
, 0, sizeof (temp
));
798 temp
.type
= NULL_TREE
;
799 temp
.opcode
= ERROR_MARK
;
800 temp
.op0
= TMR_INDEX2 (ref
);
802 result
->quick_push (temp
);
804 memset (&temp
, 0, sizeof (temp
));
805 temp
.type
= NULL_TREE
;
806 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
807 temp
.op0
= TMR_BASE (ref
);
809 result
->quick_push (temp
);
813 /* For non-calls, store the information that makes up the address. */
817 vn_reference_op_s temp
;
819 memset (&temp
, 0, sizeof (temp
));
820 temp
.type
= TREE_TYPE (ref
);
821 temp
.opcode
= TREE_CODE (ref
);
827 temp
.op0
= TREE_OPERAND (ref
, 1);
830 temp
.op0
= TREE_OPERAND (ref
, 1);
834 /* The base address gets its own vn_reference_op_s structure. */
835 temp
.op0
= TREE_OPERAND (ref
, 1);
836 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 1)))
837 temp
.off
= tree_to_shwi (TREE_OPERAND (ref
, 1));
840 /* Record bits and position. */
841 temp
.op0
= TREE_OPERAND (ref
, 1);
842 temp
.op1
= TREE_OPERAND (ref
, 2);
845 /* The field decl is enough to unambiguously specify the field,
846 a matching type is not necessary and a mismatching type
847 is always a spurious difference. */
848 temp
.type
= NULL_TREE
;
849 temp
.op0
= TREE_OPERAND (ref
, 1);
850 temp
.op1
= TREE_OPERAND (ref
, 2);
852 tree this_offset
= component_ref_field_offset (ref
);
854 && TREE_CODE (this_offset
) == INTEGER_CST
)
856 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
857 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
860 = (wi::to_offset (this_offset
)
861 + wi::lrshift (wi::to_offset (bit_offset
),
862 LOG2_BITS_PER_UNIT
));
863 if (wi::fits_shwi_p (off
)
864 /* Probibit value-numbering zero offset components
865 of addresses the same before the pass folding
866 __builtin_object_size had a chance to run
867 (checking cfun->after_inlining does the
869 && (TREE_CODE (orig
) != ADDR_EXPR
871 || cfun
->after_inlining
))
872 temp
.off
= off
.to_shwi ();
877 case ARRAY_RANGE_REF
:
879 /* Record index as operand. */
880 temp
.op0
= TREE_OPERAND (ref
, 1);
881 /* Always record lower bounds and element size. */
882 temp
.op1
= array_ref_low_bound (ref
);
883 temp
.op2
= array_ref_element_size (ref
);
884 if (TREE_CODE (temp
.op0
) == INTEGER_CST
885 && TREE_CODE (temp
.op1
) == INTEGER_CST
886 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
888 offset_int off
= ((wi::to_offset (temp
.op0
)
889 - wi::to_offset (temp
.op1
))
890 * wi::to_offset (temp
.op2
));
891 if (wi::fits_shwi_p (off
))
892 temp
.off
= off
.to_shwi();
896 if (DECL_HARD_REGISTER (ref
))
905 /* Canonicalize decls to MEM[&decl] which is what we end up with
906 when valueizing MEM[ptr] with ptr = &decl. */
907 temp
.opcode
= MEM_REF
;
908 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
910 result
->safe_push (temp
);
911 temp
.opcode
= ADDR_EXPR
;
912 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
913 temp
.type
= TREE_TYPE (temp
.op0
);
927 if (is_gimple_min_invariant (ref
))
933 /* These are only interesting for their operands, their
934 existence, and their type. They will never be the last
935 ref in the chain of references (IE they require an
936 operand), so we don't have to put anything
937 for op* as it will be handled by the iteration */
939 case VIEW_CONVERT_EXPR
:
943 /* This is only interesting for its constant offset. */
944 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
949 result
->safe_push (temp
);
951 if (REFERENCE_CLASS_P (ref
)
952 || TREE_CODE (ref
) == MODIFY_EXPR
953 || TREE_CODE (ref
) == WITH_SIZE_EXPR
954 || (TREE_CODE (ref
) == ADDR_EXPR
955 && !is_gimple_min_invariant (ref
)))
956 ref
= TREE_OPERAND (ref
, 0);
962 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
963 operands in *OPS, the reference alias set SET and the reference type TYPE.
964 Return true if something useful was produced. */
967 ao_ref_init_from_vn_reference (ao_ref
*ref
,
968 alias_set_type set
, tree type
,
969 vec
<vn_reference_op_s
> ops
)
971 vn_reference_op_t op
;
973 tree base
= NULL_TREE
;
975 HOST_WIDE_INT offset
= 0;
976 HOST_WIDE_INT max_size
;
977 HOST_WIDE_INT size
= -1;
978 tree size_tree
= NULL_TREE
;
979 alias_set_type base_alias_set
= -1;
981 /* First get the final access size from just the outermost expression. */
983 if (op
->opcode
== COMPONENT_REF
)
984 size_tree
= DECL_SIZE (op
->op0
);
985 else if (op
->opcode
== BIT_FIELD_REF
)
989 machine_mode mode
= TYPE_MODE (type
);
991 size_tree
= TYPE_SIZE (type
);
993 size
= GET_MODE_BITSIZE (mode
);
995 if (size_tree
!= NULL_TREE
)
997 if (!tree_fits_uhwi_p (size_tree
))
1000 size
= tree_to_uhwi (size_tree
);
1003 /* Initially, maxsize is the same as the accessed element size.
1004 In the following it will only grow (or become -1). */
1007 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1008 and find the ultimate containing object. */
1009 FOR_EACH_VEC_ELT (ops
, i
, op
)
1013 /* These may be in the reference ops, but we cannot do anything
1014 sensible with them here. */
1016 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1017 if (base
!= NULL_TREE
1018 && TREE_CODE (base
) == MEM_REF
1020 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
1022 vn_reference_op_t pop
= &ops
[i
-1];
1023 base
= TREE_OPERAND (op
->op0
, 0);
1030 offset
+= pop
->off
* BITS_PER_UNIT
;
1038 /* Record the base objects. */
1040 base_alias_set
= get_deref_alias_set (op
->op0
);
1041 *op0_p
= build2 (MEM_REF
, op
->type
,
1042 NULL_TREE
, op
->op0
);
1043 op0_p
= &TREE_OPERAND (*op0_p
, 0);
1054 /* And now the usual component-reference style ops. */
1056 offset
+= tree_to_shwi (op
->op1
);
1061 tree field
= op
->op0
;
1062 /* We do not have a complete COMPONENT_REF tree here so we
1063 cannot use component_ref_field_offset. Do the interesting
1067 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
)))
1071 offset
+= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
1073 offset
+= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1078 case ARRAY_RANGE_REF
:
1080 /* We recorded the lower bound and the element size. */
1081 if (!tree_fits_shwi_p (op
->op0
)
1082 || !tree_fits_shwi_p (op
->op1
)
1083 || !tree_fits_shwi_p (op
->op2
))
1087 HOST_WIDE_INT hindex
= tree_to_shwi (op
->op0
);
1088 hindex
-= tree_to_shwi (op
->op1
);
1089 hindex
*= tree_to_shwi (op
->op2
);
1090 hindex
*= BITS_PER_UNIT
;
1102 case VIEW_CONVERT_EXPR
:
1119 if (base
== NULL_TREE
)
1122 ref
->ref
= NULL_TREE
;
1124 ref
->offset
= offset
;
1126 ref
->max_size
= max_size
;
1127 ref
->ref_alias_set
= set
;
1128 if (base_alias_set
!= -1)
1129 ref
->base_alias_set
= base_alias_set
;
1131 ref
->base_alias_set
= get_alias_set (base
);
1132 /* We discount volatiles from value-numbering elsewhere. */
1133 ref
->volatile_p
= false;
1138 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1139 vn_reference_op_s's. */
1142 copy_reference_ops_from_call (gcall
*call
,
1143 vec
<vn_reference_op_s
> *result
)
1145 vn_reference_op_s temp
;
1147 tree lhs
= gimple_call_lhs (call
);
1150 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1151 different. By adding the lhs here in the vector, we ensure that the
1152 hashcode is different, guaranteeing a different value number. */
1153 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1155 memset (&temp
, 0, sizeof (temp
));
1156 temp
.opcode
= MODIFY_EXPR
;
1157 temp
.type
= TREE_TYPE (lhs
);
1160 result
->safe_push (temp
);
1163 /* Copy the type, opcode, function, static chain and EH region, if any. */
1164 memset (&temp
, 0, sizeof (temp
));
1165 temp
.type
= gimple_call_return_type (call
);
1166 temp
.opcode
= CALL_EXPR
;
1167 temp
.op0
= gimple_call_fn (call
);
1168 temp
.op1
= gimple_call_chain (call
);
1169 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1170 temp
.op2
= size_int (lr
);
1172 if (gimple_call_with_bounds_p (call
))
1173 temp
.with_bounds
= 1;
1174 result
->safe_push (temp
);
1176 /* Copy the call arguments. As they can be references as well,
1177 just chain them together. */
1178 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1180 tree callarg
= gimple_call_arg (call
, i
);
1181 copy_reference_ops_from_ref (callarg
, result
);
1185 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1186 *I_P to point to the last element of the replacement. */
1188 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1191 unsigned int i
= *i_p
;
1192 vn_reference_op_t op
= &(*ops
)[i
];
1193 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1195 HOST_WIDE_INT addr_offset
= 0;
1197 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1198 from .foo.bar to the preceding MEM_REF offset and replace the
1199 address with &OBJ. */
1200 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1202 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1203 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1205 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1207 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1208 op
->op0
= build_fold_addr_expr (addr_base
);
1209 if (tree_fits_shwi_p (mem_op
->op0
))
1210 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1216 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1217 *I_P to point to the last element of the replacement. */
1219 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1222 unsigned int i
= *i_p
;
1223 vn_reference_op_t op
= &(*ops
)[i
];
1224 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1226 enum tree_code code
;
1229 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1230 if (!is_gimple_assign (def_stmt
))
1233 code
= gimple_assign_rhs_code (def_stmt
);
1234 if (code
!= ADDR_EXPR
1235 && code
!= POINTER_PLUS_EXPR
)
1238 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1240 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1241 from .foo.bar to the preceding MEM_REF offset and replace the
1242 address with &OBJ. */
1243 if (code
== ADDR_EXPR
)
1245 tree addr
, addr_base
;
1246 HOST_WIDE_INT addr_offset
;
1248 addr
= gimple_assign_rhs1 (def_stmt
);
1249 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1252 || TREE_CODE (addr_base
) != MEM_REF
)
1256 off
+= mem_ref_offset (addr_base
);
1257 op
->op0
= TREE_OPERAND (addr_base
, 0);
1262 ptr
= gimple_assign_rhs1 (def_stmt
);
1263 ptroff
= gimple_assign_rhs2 (def_stmt
);
1264 if (TREE_CODE (ptr
) != SSA_NAME
1265 || TREE_CODE (ptroff
) != INTEGER_CST
)
1268 off
+= wi::to_offset (ptroff
);
1272 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1273 if (tree_fits_shwi_p (mem_op
->op0
))
1274 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1277 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1278 op
->op0
= SSA_VAL (op
->op0
);
1279 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1280 op
->opcode
= TREE_CODE (op
->op0
);
1283 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1284 vn_reference_maybe_forwprop_address (ops
, i_p
);
1285 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1286 vn_reference_fold_indirect (ops
, i_p
);
1289 /* Optimize the reference REF to a constant if possible or return
1290 NULL_TREE if not. */
1293 fully_constant_vn_reference_p (vn_reference_t ref
)
1295 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1296 vn_reference_op_t op
;
1298 /* Try to simplify the translated expression if it is
1299 a call to a builtin function with at most two arguments. */
1301 if (op
->opcode
== CALL_EXPR
1302 && TREE_CODE (op
->op0
) == ADDR_EXPR
1303 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1304 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1305 && operands
.length () >= 2
1306 && operands
.length () <= 3)
1308 vn_reference_op_t arg0
, arg1
= NULL
;
1309 bool anyconst
= false;
1310 arg0
= &operands
[1];
1311 if (operands
.length () > 2)
1312 arg1
= &operands
[2];
1313 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1314 || (arg0
->opcode
== ADDR_EXPR
1315 && is_gimple_min_invariant (arg0
->op0
)))
1318 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1319 || (arg1
->opcode
== ADDR_EXPR
1320 && is_gimple_min_invariant (arg1
->op0
))))
1324 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1327 arg1
? arg1
->op0
: NULL
);
1329 && TREE_CODE (folded
) == NOP_EXPR
)
1330 folded
= TREE_OPERAND (folded
, 0);
1332 && is_gimple_min_invariant (folded
))
1337 /* Simplify reads from constants or constant initializers. */
1338 else if (BITS_PER_UNIT
== 8
1339 && is_gimple_reg_type (ref
->type
)
1340 && (!INTEGRAL_TYPE_P (ref
->type
)
1341 || TYPE_PRECISION (ref
->type
) % BITS_PER_UNIT
== 0))
1343 HOST_WIDE_INT off
= 0;
1344 HOST_WIDE_INT size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1345 if (size
% BITS_PER_UNIT
!= 0
1346 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1348 size
/= BITS_PER_UNIT
;
1350 for (i
= 0; i
< operands
.length (); ++i
)
1352 if (operands
[i
].off
== -1)
1354 off
+= operands
[i
].off
;
1355 if (operands
[i
].opcode
== MEM_REF
)
1361 vn_reference_op_t base
= &operands
[--i
];
1362 tree ctor
= error_mark_node
;
1363 tree decl
= NULL_TREE
;
1364 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1366 else if (base
->opcode
== MEM_REF
1367 && base
[1].opcode
== ADDR_EXPR
1368 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1369 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
))
1371 decl
= TREE_OPERAND (base
[1].op0
, 0);
1372 ctor
= ctor_for_folding (decl
);
1374 if (ctor
== NULL_TREE
)
1375 return build_zero_cst (ref
->type
);
1376 else if (ctor
!= error_mark_node
)
1380 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1381 off
* BITS_PER_UNIT
,
1382 size
* BITS_PER_UNIT
, decl
);
1385 STRIP_USELESS_TYPE_CONVERSION (res
);
1386 if (is_gimple_min_invariant (res
))
1392 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1393 if (native_encode_expr (ctor
, buf
, size
, off
) > 0)
1394 return native_interpret_expr (ref
->type
, buf
, size
);
1402 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1403 structures into their value numbers. This is done in-place, and
1404 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1405 whether any operands were valueized. */
1407 static vec
<vn_reference_op_s
>
1408 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1410 vn_reference_op_t vro
;
1413 *valueized_anything
= false;
1415 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1417 if (vro
->opcode
== SSA_NAME
1418 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1420 tree tem
= SSA_VAL (vro
->op0
);
1421 if (tem
!= vro
->op0
)
1423 *valueized_anything
= true;
1426 /* If it transforms from an SSA_NAME to a constant, update
1428 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1429 vro
->opcode
= TREE_CODE (vro
->op0
);
1431 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1433 tree tem
= SSA_VAL (vro
->op1
);
1434 if (tem
!= vro
->op1
)
1436 *valueized_anything
= true;
1440 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1442 tree tem
= SSA_VAL (vro
->op2
);
1443 if (tem
!= vro
->op2
)
1445 *valueized_anything
= true;
1449 /* If it transforms from an SSA_NAME to an address, fold with
1450 a preceding indirect reference. */
1453 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1454 && orig
[i
- 1].opcode
== MEM_REF
)
1455 vn_reference_fold_indirect (&orig
, &i
);
1457 && vro
->opcode
== SSA_NAME
1458 && orig
[i
- 1].opcode
== MEM_REF
)
1459 vn_reference_maybe_forwprop_address (&orig
, &i
);
1460 /* If it transforms a non-constant ARRAY_REF into a constant
1461 one, adjust the constant offset. */
1462 else if (vro
->opcode
== ARRAY_REF
1464 && TREE_CODE (vro
->op0
) == INTEGER_CST
1465 && TREE_CODE (vro
->op1
) == INTEGER_CST
1466 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1468 offset_int off
= ((wi::to_offset (vro
->op0
)
1469 - wi::to_offset (vro
->op1
))
1470 * wi::to_offset (vro
->op2
));
1471 if (wi::fits_shwi_p (off
))
1472 vro
->off
= off
.to_shwi ();
1479 static vec
<vn_reference_op_s
>
1480 valueize_refs (vec
<vn_reference_op_s
> orig
)
1483 return valueize_refs_1 (orig
, &tem
);
1486 static vec
<vn_reference_op_s
> shared_lookup_references
;
1488 /* Create a vector of vn_reference_op_s structures from REF, a
1489 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1490 this function. *VALUEIZED_ANYTHING will specify whether any
1491 operands were valueized. */
1493 static vec
<vn_reference_op_s
>
1494 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1498 shared_lookup_references
.truncate (0);
1499 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1500 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1501 valueized_anything
);
1502 return shared_lookup_references
;
1505 /* Create a vector of vn_reference_op_s structures from CALL, a
1506 call statement. The vector is shared among all callers of
1509 static vec
<vn_reference_op_s
>
1510 valueize_shared_reference_ops_from_call (gcall
*call
)
1514 shared_lookup_references
.truncate (0);
1515 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1516 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1517 return shared_lookup_references
;
1520 /* Lookup a SCCVN reference operation VR in the current hash table.
1521 Returns the resulting value number if it exists in the hash table,
1522 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1523 vn_reference_t stored in the hashtable if something is found. */
1526 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1528 vn_reference_s
**slot
;
1531 hash
= vr
->hashcode
;
1532 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1533 if (!slot
&& current_info
== optimistic_info
)
1534 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1538 *vnresult
= (vn_reference_t
)*slot
;
1539 return ((vn_reference_t
)*slot
)->result
;
1545 static tree
*last_vuse_ptr
;
1546 static vn_lookup_kind vn_walk_kind
;
1547 static vn_lookup_kind default_vn_walk_kind
;
1549 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1550 with the current VUSE and performs the expression lookup. */
1553 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1554 unsigned int cnt
, void *vr_
)
1556 vn_reference_t vr
= (vn_reference_t
)vr_
;
1557 vn_reference_s
**slot
;
1560 /* This bounds the stmt walks we perform on reference lookups
1561 to O(1) instead of O(N) where N is the number of dominating
1563 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1567 *last_vuse_ptr
= vuse
;
1569 /* Fixup vuse and hash. */
1571 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1572 vr
->vuse
= vuse_ssa_val (vuse
);
1574 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1576 hash
= vr
->hashcode
;
1577 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1578 if (!slot
&& current_info
== optimistic_info
)
1579 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1586 /* Lookup an existing or insert a new vn_reference entry into the
1587 value table for the VUSE, SET, TYPE, OPERANDS reference which
1588 has the value VALUE which is either a constant or an SSA name. */
1590 static vn_reference_t
1591 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1594 vec
<vn_reference_op_s
,
1598 struct vn_reference_s vr1
;
1599 vn_reference_t result
;
1602 vr1
.operands
= operands
;
1605 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1606 if (vn_reference_lookup_1 (&vr1
, &result
))
1608 if (TREE_CODE (value
) == SSA_NAME
)
1609 value_id
= VN_INFO (value
)->value_id
;
1611 value_id
= get_or_alloc_constant_value_id (value
);
1612 return vn_reference_insert_pieces (vuse
, set
, type
,
1613 operands
.copy (), value
, value_id
);
1616 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1617 from the statement defining VUSE and if not successful tries to
1618 translate *REFP and VR_ through an aggregate copy at the definition
1622 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1623 bool disambiguate_only
)
1625 vn_reference_t vr
= (vn_reference_t
)vr_
;
1626 gimple def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1628 HOST_WIDE_INT offset
, maxsize
;
1629 static vec
<vn_reference_op_s
>
1632 bool lhs_ref_ok
= false;
1634 /* First try to disambiguate after value-replacing in the definitions LHS. */
1635 if (is_gimple_assign (def_stmt
))
1637 vec
<vn_reference_op_s
> tem
;
1638 tree lhs
= gimple_assign_lhs (def_stmt
);
1639 bool valueized_anything
= false;
1640 /* Avoid re-allocation overhead. */
1641 lhs_ops
.truncate (0);
1642 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1644 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1645 gcc_assert (lhs_ops
== tem
);
1646 if (valueized_anything
)
1648 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1649 get_alias_set (lhs
),
1650 TREE_TYPE (lhs
), lhs_ops
);
1652 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1657 ao_ref_init (&lhs_ref
, lhs
);
1661 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1662 && gimple_call_num_args (def_stmt
) <= 4)
1664 /* For builtin calls valueize its arguments and call the
1665 alias oracle again. Valueization may improve points-to
1666 info of pointers and constify size and position arguments.
1667 Originally this was motivated by PR61034 which has
1668 conditional calls to free falsely clobbering ref because
1669 of imprecise points-to info of the argument. */
1671 bool valueized_anything
= false;
1672 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1674 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1675 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1676 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1678 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1679 valueized_anything
= true;
1682 if (valueized_anything
)
1684 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1686 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1687 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1693 if (disambiguate_only
)
1696 base
= ao_ref_base (ref
);
1697 offset
= ref
->offset
;
1698 maxsize
= ref
->max_size
;
1700 /* If we cannot constrain the size of the reference we cannot
1701 test if anything kills it. */
1705 /* We can't deduce anything useful from clobbers. */
1706 if (gimple_clobber_p (def_stmt
))
1709 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1710 from that definition.
1712 if (is_gimple_reg_type (vr
->type
)
1713 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1714 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1715 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1716 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1718 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1720 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1721 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
);
1722 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1723 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1724 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1726 && operand_equal_p (base
, base2
, 0)
1727 && offset2
<= offset
1728 && offset2
+ size2
>= offset
+ maxsize
)
1730 tree val
= build_zero_cst (vr
->type
);
1731 return vn_reference_lookup_or_insert_for_pieces
1732 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1736 /* 2) Assignment from an empty CONSTRUCTOR. */
1737 else if (is_gimple_reg_type (vr
->type
)
1738 && gimple_assign_single_p (def_stmt
)
1739 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1740 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1743 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1744 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1745 &offset2
, &size2
, &maxsize2
);
1747 && operand_equal_p (base
, base2
, 0)
1748 && offset2
<= offset
1749 && offset2
+ size2
>= offset
+ maxsize
)
1751 tree val
= build_zero_cst (vr
->type
);
1752 return vn_reference_lookup_or_insert_for_pieces
1753 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1757 /* 3) Assignment from a constant. We can use folds native encode/interpret
1758 routines to extract the assigned bits. */
1759 else if (vn_walk_kind
== VN_WALKREWRITE
1760 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1761 && ref
->size
== maxsize
1762 && maxsize
% BITS_PER_UNIT
== 0
1763 && offset
% BITS_PER_UNIT
== 0
1764 && is_gimple_reg_type (vr
->type
)
1765 && gimple_assign_single_p (def_stmt
)
1766 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
1769 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1770 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1771 &offset2
, &size2
, &maxsize2
);
1773 && maxsize2
== size2
1774 && size2
% BITS_PER_UNIT
== 0
1775 && offset2
% BITS_PER_UNIT
== 0
1776 && operand_equal_p (base
, base2
, 0)
1777 && offset2
<= offset
1778 && offset2
+ size2
>= offset
+ maxsize
)
1780 /* We support up to 512-bit values (for V8DFmode). */
1781 unsigned char buffer
[64];
1784 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1785 buffer
, sizeof (buffer
));
1788 tree val
= native_interpret_expr (vr
->type
,
1790 + ((offset
- offset2
)
1792 ref
->size
/ BITS_PER_UNIT
);
1794 return vn_reference_lookup_or_insert_for_pieces
1795 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1800 /* 4) Assignment from an SSA name which definition we may be able
1801 to access pieces from. */
1802 else if (ref
->size
== maxsize
1803 && is_gimple_reg_type (vr
->type
)
1804 && gimple_assign_single_p (def_stmt
)
1805 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1807 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
1808 gimple def_stmt2
= SSA_NAME_DEF_STMT (rhs1
);
1809 if (is_gimple_assign (def_stmt2
)
1810 && (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
1811 || gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
)
1812 && types_compatible_p (vr
->type
, TREE_TYPE (TREE_TYPE (rhs1
))))
1815 HOST_WIDE_INT offset2
, size2
, maxsize2
, off
;
1816 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1817 &offset2
, &size2
, &maxsize2
);
1818 off
= offset
- offset2
;
1820 && maxsize2
== size2
1821 && operand_equal_p (base
, base2
, 0)
1822 && offset2
<= offset
1823 && offset2
+ size2
>= offset
+ maxsize
)
1825 tree val
= NULL_TREE
;
1827 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1
))));
1828 if (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
)
1831 val
= gimple_assign_rhs1 (def_stmt2
);
1832 else if (off
== elsz
)
1833 val
= gimple_assign_rhs2 (def_stmt2
);
1835 else if (gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
1838 tree ctor
= gimple_assign_rhs1 (def_stmt2
);
1839 unsigned i
= off
/ elsz
;
1840 if (i
< CONSTRUCTOR_NELTS (ctor
))
1842 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, i
);
1843 if (TREE_CODE (TREE_TYPE (rhs1
)) == VECTOR_TYPE
)
1845 if (TREE_CODE (TREE_TYPE (elt
->value
))
1852 return vn_reference_lookup_or_insert_for_pieces
1853 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1858 /* 5) For aggregate copies translate the reference through them if
1859 the copy kills ref. */
1860 else if (vn_walk_kind
== VN_WALKREWRITE
1861 && gimple_assign_single_p (def_stmt
)
1862 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1863 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1864 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1867 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1869 auto_vec
<vn_reference_op_s
> rhs
;
1870 vn_reference_op_t vro
;
1876 /* See if the assignment kills REF. */
1877 base2
= ao_ref_base (&lhs_ref
);
1878 offset2
= lhs_ref
.offset
;
1879 size2
= lhs_ref
.size
;
1880 maxsize2
= lhs_ref
.max_size
;
1882 || (base
!= base2
&& !operand_equal_p (base
, base2
, 0))
1884 || offset2
+ size2
< offset
+ maxsize
)
1887 /* Find the common base of ref and the lhs. lhs_ops already
1888 contains valueized operands for the lhs. */
1889 i
= vr
->operands
.length () - 1;
1890 j
= lhs_ops
.length () - 1;
1891 while (j
>= 0 && i
>= 0
1892 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
1898 /* ??? The innermost op should always be a MEM_REF and we already
1899 checked that the assignment to the lhs kills vr. Thus for
1900 aggregate copies using char[] types the vn_reference_op_eq
1901 may fail when comparing types for compatibility. But we really
1902 don't care here - further lookups with the rewritten operands
1903 will simply fail if we messed up types too badly. */
1904 HOST_WIDE_INT extra_off
= 0;
1905 if (j
== 0 && i
>= 0
1906 && lhs_ops
[0].opcode
== MEM_REF
1907 && lhs_ops
[0].off
!= -1)
1909 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
1911 else if (vr
->operands
[i
].opcode
== MEM_REF
1912 && vr
->operands
[i
].off
!= -1)
1914 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
1919 /* i now points to the first additional op.
1920 ??? LHS may not be completely contained in VR, one or more
1921 VIEW_CONVERT_EXPRs could be in its way. We could at least
1922 try handling outermost VIEW_CONVERT_EXPRs. */
1926 /* Now re-write REF to be based on the rhs of the assignment. */
1927 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1929 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1932 if (rhs
.length () < 2
1933 || rhs
[0].opcode
!= MEM_REF
1934 || rhs
[0].off
== -1)
1936 rhs
[0].off
+= extra_off
;
1937 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
1938 build_int_cst (TREE_TYPE (rhs
[0].op0
),
1942 /* We need to pre-pend vr->operands[0..i] to rhs. */
1943 vec
<vn_reference_op_s
> old
= vr
->operands
;
1944 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
1946 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
1947 if (old
== shared_lookup_references
)
1948 shared_lookup_references
= vr
->operands
;
1951 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
1952 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
1953 vr
->operands
[i
+ 1 + j
] = *vro
;
1954 vr
->operands
= valueize_refs (vr
->operands
);
1955 if (old
== shared_lookup_references
)
1956 shared_lookup_references
= vr
->operands
;
1957 vr
->hashcode
= vn_reference_compute_hash (vr
);
1959 /* Try folding the new reference to a constant. */
1960 tree val
= fully_constant_vn_reference_p (vr
);
1962 return vn_reference_lookup_or_insert_for_pieces
1963 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1965 /* Adjust *ref from the new operands. */
1966 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
1968 /* This can happen with bitfields. */
1969 if (ref
->size
!= r
.size
)
1973 /* Do not update last seen VUSE after translating. */
1974 last_vuse_ptr
= NULL
;
1976 /* Keep looking for the adjusted *REF / VR pair. */
1980 /* 6) For memcpy copies translate the reference through them if
1981 the copy kills ref. */
1982 else if (vn_walk_kind
== VN_WALKREWRITE
1983 && is_gimple_reg_type (vr
->type
)
1984 /* ??? Handle BCOPY as well. */
1985 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
1986 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
1987 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
1988 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
1989 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
1990 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
1991 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
1992 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
1996 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
1997 vn_reference_op_s op
;
2001 /* Only handle non-variable, addressable refs. */
2002 if (ref
->size
!= maxsize
2003 || offset
% BITS_PER_UNIT
!= 0
2004 || ref
->size
% BITS_PER_UNIT
!= 0)
2007 /* Extract a pointer base and an offset for the destination. */
2008 lhs
= gimple_call_arg (def_stmt
, 0);
2010 if (TREE_CODE (lhs
) == SSA_NAME
)
2011 lhs
= SSA_VAL (lhs
);
2012 if (TREE_CODE (lhs
) == ADDR_EXPR
)
2014 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
2018 if (TREE_CODE (tem
) == MEM_REF
2019 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2021 lhs
= TREE_OPERAND (tem
, 0);
2022 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2024 else if (DECL_P (tem
))
2025 lhs
= build_fold_addr_expr (tem
);
2029 if (TREE_CODE (lhs
) != SSA_NAME
2030 && TREE_CODE (lhs
) != ADDR_EXPR
)
2033 /* Extract a pointer base and an offset for the source. */
2034 rhs
= gimple_call_arg (def_stmt
, 1);
2036 if (TREE_CODE (rhs
) == SSA_NAME
)
2037 rhs
= SSA_VAL (rhs
);
2038 if (TREE_CODE (rhs
) == ADDR_EXPR
)
2040 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
2044 if (TREE_CODE (tem
) == MEM_REF
2045 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2047 rhs
= TREE_OPERAND (tem
, 0);
2048 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2050 else if (DECL_P (tem
))
2051 rhs
= build_fold_addr_expr (tem
);
2055 if (TREE_CODE (rhs
) != SSA_NAME
2056 && TREE_CODE (rhs
) != ADDR_EXPR
)
2059 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
2061 /* The bases of the destination and the references have to agree. */
2062 if ((TREE_CODE (base
) != MEM_REF
2064 || (TREE_CODE (base
) == MEM_REF
2065 && (TREE_OPERAND (base
, 0) != lhs
2066 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
2068 && (TREE_CODE (lhs
) != ADDR_EXPR
2069 || TREE_OPERAND (lhs
, 0) != base
)))
2072 /* And the access has to be contained within the memcpy destination. */
2073 at
= offset
/ BITS_PER_UNIT
;
2074 if (TREE_CODE (base
) == MEM_REF
)
2075 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
2077 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
2080 /* Make room for 2 operands in the new reference. */
2081 if (vr
->operands
.length () < 2)
2083 vec
<vn_reference_op_s
> old
= vr
->operands
;
2084 vr
->operands
.safe_grow_cleared (2);
2085 if (old
== shared_lookup_references
2086 && vr
->operands
!= old
)
2087 shared_lookup_references
= vr
->operands
;
2090 vr
->operands
.truncate (2);
2092 /* The looked-through reference is a simple MEM_REF. */
2093 memset (&op
, 0, sizeof (op
));
2095 op
.opcode
= MEM_REF
;
2096 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2097 op
.off
= at
- lhs_offset
+ rhs_offset
;
2098 vr
->operands
[0] = op
;
2099 op
.type
= TREE_TYPE (rhs
);
2100 op
.opcode
= TREE_CODE (rhs
);
2103 vr
->operands
[1] = op
;
2104 vr
->hashcode
= vn_reference_compute_hash (vr
);
2106 /* Adjust *ref from the new operands. */
2107 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2109 /* This can happen with bitfields. */
2110 if (ref
->size
!= r
.size
)
2114 /* Do not update last seen VUSE after translating. */
2115 last_vuse_ptr
= NULL
;
2117 /* Keep looking for the adjusted *REF / VR pair. */
2121 /* Bail out and stop walking. */
2125 /* Lookup a reference operation by it's parts, in the current hash table.
2126 Returns the resulting value number if it exists in the hash table,
2127 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2128 vn_reference_t stored in the hashtable if something is found. */
2131 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2132 vec
<vn_reference_op_s
> operands
,
2133 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2135 struct vn_reference_s vr1
;
2143 vr1
.vuse
= vuse_ssa_val (vuse
);
2144 shared_lookup_references
.truncate (0);
2145 shared_lookup_references
.safe_grow (operands
.length ());
2146 memcpy (shared_lookup_references
.address (),
2147 operands
.address (),
2148 sizeof (vn_reference_op_s
)
2149 * operands
.length ());
2150 vr1
.operands
= operands
= shared_lookup_references
2151 = valueize_refs (shared_lookup_references
);
2154 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2155 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2158 vn_reference_lookup_1 (&vr1
, vnresult
);
2160 && kind
!= VN_NOWALK
2164 vn_walk_kind
= kind
;
2165 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2167 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2168 vn_reference_lookup_2
,
2169 vn_reference_lookup_3
,
2170 vuse_ssa_val
, &vr1
);
2171 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2175 return (*vnresult
)->result
;
2180 /* Lookup OP in the current hash table, and return the resulting value
2181 number if it exists in the hash table. Return NULL_TREE if it does
2182 not exist in the hash table or if the result field of the structure
2183 was NULL.. VNRESULT will be filled in with the vn_reference_t
2184 stored in the hashtable if one exists. */
2187 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2188 vn_reference_t
*vnresult
)
2190 vec
<vn_reference_op_s
> operands
;
2191 struct vn_reference_s vr1
;
2193 bool valuezied_anything
;
2198 vr1
.vuse
= vuse_ssa_val (vuse
);
2199 vr1
.operands
= operands
2200 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2201 vr1
.type
= TREE_TYPE (op
);
2202 vr1
.set
= get_alias_set (op
);
2203 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2204 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2207 if (kind
!= VN_NOWALK
2210 vn_reference_t wvnresult
;
2212 /* Make sure to use a valueized reference if we valueized anything.
2213 Otherwise preserve the full reference for advanced TBAA. */
2214 if (!valuezied_anything
2215 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2217 ao_ref_init (&r
, op
);
2218 vn_walk_kind
= kind
;
2220 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2221 vn_reference_lookup_2
,
2222 vn_reference_lookup_3
,
2223 vuse_ssa_val
, &vr1
);
2224 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2228 *vnresult
= wvnresult
;
2229 return wvnresult
->result
;
2235 return vn_reference_lookup_1 (&vr1
, vnresult
);
2238 /* Lookup CALL in the current hash table and return the entry in
2239 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2242 vn_reference_lookup_call (gcall
*call
, vn_reference_t
*vnresult
,
2248 tree vuse
= gimple_vuse (call
);
2250 vr
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2251 vr
->operands
= valueize_shared_reference_ops_from_call (call
);
2252 vr
->type
= gimple_expr_type (call
);
2254 vr
->hashcode
= vn_reference_compute_hash (vr
);
2255 vn_reference_lookup_1 (vr
, vnresult
);
2258 /* Insert OP into the current hash table with a value number of
2259 RESULT, and return the resulting reference structure we created. */
2261 static vn_reference_t
2262 vn_reference_insert (tree op
, tree result
, tree vuse
, tree vdef
)
2264 vn_reference_s
**slot
;
2268 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2269 if (TREE_CODE (result
) == SSA_NAME
)
2270 vr1
->value_id
= VN_INFO (result
)->value_id
;
2272 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
2273 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2274 vr1
->operands
= valueize_shared_reference_ops_from_ref (op
, &tem
).copy ();
2275 vr1
->type
= TREE_TYPE (op
);
2276 vr1
->set
= get_alias_set (op
);
2277 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2278 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
2279 vr1
->result_vdef
= vdef
;
2281 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
2284 /* Because we lookup stores using vuses, and value number failures
2285 using the vdefs (see visit_reference_op_store for how and why),
2286 it's possible that on failure we may try to insert an already
2287 inserted store. This is not wrong, there is no ssa name for a
2288 store that we could use as a differentiator anyway. Thus, unlike
2289 the other lookup functions, you cannot gcc_assert (!*slot)
2292 /* But free the old slot in case of a collision. */
2294 free_reference (*slot
);
2300 /* Insert a reference by it's pieces into the current hash table with
2301 a value number of RESULT. Return the resulting reference
2302 structure we created. */
2305 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
2306 vec
<vn_reference_op_s
> operands
,
2307 tree result
, unsigned int value_id
)
2310 vn_reference_s
**slot
;
2313 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2314 vr1
->value_id
= value_id
;
2315 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2316 vr1
->operands
= valueize_refs (operands
);
2319 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2320 if (result
&& TREE_CODE (result
) == SSA_NAME
)
2321 result
= SSA_VAL (result
);
2322 vr1
->result
= result
;
2324 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
2327 /* At this point we should have all the things inserted that we have
2328 seen before, and we should never try inserting something that
2330 gcc_assert (!*slot
);
2332 free_reference (*slot
);
2338 /* Compute and return the hash value for nary operation VBO1. */
2341 vn_nary_op_compute_hash (const vn_nary_op_t vno1
)
2343 inchash::hash hstate
;
2346 for (i
= 0; i
< vno1
->length
; ++i
)
2347 if (TREE_CODE (vno1
->op
[i
]) == SSA_NAME
)
2348 vno1
->op
[i
] = SSA_VAL (vno1
->op
[i
]);
2350 if (vno1
->length
== 2
2351 && commutative_tree_code (vno1
->opcode
)
2352 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1], false))
2354 tree temp
= vno1
->op
[0];
2355 vno1
->op
[0] = vno1
->op
[1];
2359 hstate
.add_int (vno1
->opcode
);
2360 for (i
= 0; i
< vno1
->length
; ++i
)
2361 inchash::add_expr (vno1
->op
[i
], hstate
);
2363 return hstate
.end ();
2366 /* Compare nary operations VNO1 and VNO2 and return true if they are
2370 vn_nary_op_eq (const_vn_nary_op_t
const vno1
, const_vn_nary_op_t
const vno2
)
2374 if (vno1
->hashcode
!= vno2
->hashcode
)
2377 if (vno1
->length
!= vno2
->length
)
2380 if (vno1
->opcode
!= vno2
->opcode
2381 || !types_compatible_p (vno1
->type
, vno2
->type
))
2384 for (i
= 0; i
< vno1
->length
; ++i
)
2385 if (!expressions_equal_p (vno1
->op
[i
], vno2
->op
[i
]))
2391 /* Initialize VNO from the pieces provided. */
2394 init_vn_nary_op_from_pieces (vn_nary_op_t vno
, unsigned int length
,
2395 enum tree_code code
, tree type
, tree
*ops
)
2398 vno
->length
= length
;
2400 memcpy (&vno
->op
[0], ops
, sizeof (tree
) * length
);
2403 /* Initialize VNO from OP. */
2406 init_vn_nary_op_from_op (vn_nary_op_t vno
, tree op
)
2410 vno
->opcode
= TREE_CODE (op
);
2411 vno
->length
= TREE_CODE_LENGTH (TREE_CODE (op
));
2412 vno
->type
= TREE_TYPE (op
);
2413 for (i
= 0; i
< vno
->length
; ++i
)
2414 vno
->op
[i
] = TREE_OPERAND (op
, i
);
2417 /* Return the number of operands for a vn_nary ops structure from STMT. */
2420 vn_nary_length_from_stmt (gimple stmt
)
2422 switch (gimple_assign_rhs_code (stmt
))
2426 case VIEW_CONVERT_EXPR
:
2433 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2436 return gimple_num_ops (stmt
) - 1;
2440 /* Initialize VNO from STMT. */
2443 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple stmt
)
2447 vno
->opcode
= gimple_assign_rhs_code (stmt
);
2448 vno
->type
= gimple_expr_type (stmt
);
2449 switch (vno
->opcode
)
2453 case VIEW_CONVERT_EXPR
:
2455 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2460 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2461 vno
->op
[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 1);
2462 vno
->op
[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 2);
2466 vno
->length
= CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2467 for (i
= 0; i
< vno
->length
; ++i
)
2468 vno
->op
[i
] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt
), i
)->value
;
2472 gcc_checking_assert (!gimple_assign_single_p (stmt
));
2473 vno
->length
= gimple_num_ops (stmt
) - 1;
2474 for (i
= 0; i
< vno
->length
; ++i
)
2475 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
2479 /* Compute the hashcode for VNO and look for it in the hash table;
2480 return the resulting value number if it exists in the hash table.
2481 Return NULL_TREE if it does not exist in the hash table or if the
2482 result field of the operation is NULL. VNRESULT will contain the
2483 vn_nary_op_t from the hashtable if it exists. */
2486 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
2488 vn_nary_op_s
**slot
;
2493 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2494 slot
= current_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2496 if (!slot
&& current_info
== optimistic_info
)
2497 slot
= valid_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2503 return (*slot
)->result
;
2506 /* Lookup a n-ary operation by its pieces and return the resulting value
2507 number if it exists in the hash table. Return NULL_TREE if it does
2508 not exist in the hash table or if the result field of the operation
2509 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2513 vn_nary_op_lookup_pieces (unsigned int length
, enum tree_code code
,
2514 tree type
, tree
*ops
, vn_nary_op_t
*vnresult
)
2516 vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s
,
2517 sizeof_vn_nary_op (length
));
2518 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2519 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2522 /* Lookup OP in the current hash table, and return the resulting value
2523 number if it exists in the hash table. Return NULL_TREE if it does
2524 not exist in the hash table or if the result field of the operation
2525 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2529 vn_nary_op_lookup (tree op
, vn_nary_op_t
*vnresult
)
2532 = XALLOCAVAR (struct vn_nary_op_s
,
2533 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op
))));
2534 init_vn_nary_op_from_op (vno1
, op
);
2535 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2538 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2539 value number if it exists in the hash table. Return NULL_TREE if
2540 it does not exist in the hash table. VNRESULT will contain the
2541 vn_nary_op_t from the hashtable if it exists. */
2544 vn_nary_op_lookup_stmt (gimple stmt
, vn_nary_op_t
*vnresult
)
2547 = XALLOCAVAR (struct vn_nary_op_s
,
2548 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt
)));
2549 init_vn_nary_op_from_stmt (vno1
, stmt
);
2550 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2553 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2556 alloc_vn_nary_op_noinit (unsigned int length
, struct obstack
*stack
)
2558 return (vn_nary_op_t
) obstack_alloc (stack
, sizeof_vn_nary_op (length
));
2561 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2565 alloc_vn_nary_op (unsigned int length
, tree result
, unsigned int value_id
)
2567 vn_nary_op_t vno1
= alloc_vn_nary_op_noinit (length
,
2568 ¤t_info
->nary_obstack
);
2570 vno1
->value_id
= value_id
;
2571 vno1
->length
= length
;
2572 vno1
->result
= result
;
2577 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2578 VNO->HASHCODE first. */
2581 vn_nary_op_insert_into (vn_nary_op_t vno
, vn_nary_op_table_type
*table
,
2584 vn_nary_op_s
**slot
;
2587 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2589 slot
= table
->find_slot_with_hash (vno
, vno
->hashcode
, INSERT
);
2590 gcc_assert (!*slot
);
2596 /* Insert a n-ary operation into the current hash table using it's
2597 pieces. Return the vn_nary_op_t structure we created and put in
2601 vn_nary_op_insert_pieces (unsigned int length
, enum tree_code code
,
2602 tree type
, tree
*ops
,
2603 tree result
, unsigned int value_id
)
2605 vn_nary_op_t vno1
= alloc_vn_nary_op (length
, result
, value_id
);
2606 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2607 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2610 /* Insert OP into the current hash table with a value number of
2611 RESULT. Return the vn_nary_op_t structure we created and put in
2615 vn_nary_op_insert (tree op
, tree result
)
2617 unsigned length
= TREE_CODE_LENGTH (TREE_CODE (op
));
2620 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
2621 init_vn_nary_op_from_op (vno1
, op
);
2622 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2625 /* Insert the rhs of STMT into the current hash table with a value number of
2629 vn_nary_op_insert_stmt (gimple stmt
, tree result
)
2632 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt
),
2633 result
, VN_INFO (result
)->value_id
);
2634 init_vn_nary_op_from_stmt (vno1
, stmt
);
2635 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2638 /* Compute a hashcode for PHI operation VP1 and return it. */
2640 static inline hashval_t
2641 vn_phi_compute_hash (vn_phi_t vp1
)
2643 inchash::hash
hstate (vp1
->block
->index
);
2648 /* If all PHI arguments are constants we need to distinguish
2649 the PHI node via its type. */
2651 hstate
.merge_hash (vn_hash_type (type
));
2653 FOR_EACH_VEC_ELT (vp1
->phiargs
, i
, phi1op
)
2655 if (phi1op
== VN_TOP
)
2657 inchash::add_expr (phi1op
, hstate
);
2660 return hstate
.end ();
2663 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2666 vn_phi_eq (const_vn_phi_t
const vp1
, const_vn_phi_t
const vp2
)
2668 if (vp1
->hashcode
!= vp2
->hashcode
)
2671 if (vp1
->block
== vp2
->block
)
2676 /* If the PHI nodes do not have compatible types
2677 they are not the same. */
2678 if (!types_compatible_p (vp1
->type
, vp2
->type
))
2681 /* Any phi in the same block will have it's arguments in the
2682 same edge order, because of how we store phi nodes. */
2683 FOR_EACH_VEC_ELT (vp1
->phiargs
, i
, phi1op
)
2685 tree phi2op
= vp2
->phiargs
[i
];
2686 if (phi1op
== VN_TOP
|| phi2op
== VN_TOP
)
2688 if (!expressions_equal_p (phi1op
, phi2op
))
/* Scratch vector of valueized PHI arguments, shared across lookups to
   avoid reallocating on every vn_phi_lookup call; it is truncated to
   zero length before each use.  */
2696 static vec
<tree
> shared_lookup_phiargs
;
2698 /* Lookup PHI in the current hash table, and return the resulting
2699 value number if it exists in the hash table. Return NULL_TREE if
2700 it does not exist in the hash table. */
2703 vn_phi_lookup (gimple phi
)
2706 struct vn_phi_s vp1
;
2709 shared_lookup_phiargs
.truncate (0);
2711 /* Canonicalize the SSA_NAME's to their value number. */
2712 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2714 tree def
= PHI_ARG_DEF (phi
, i
);
2715 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2716 shared_lookup_phiargs
.safe_push (def
);
2718 vp1
.type
= TREE_TYPE (gimple_phi_result (phi
));
2719 vp1
.phiargs
= shared_lookup_phiargs
;
2720 vp1
.block
= gimple_bb (phi
);
2721 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2722 slot
= current_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2724 if (!slot
&& current_info
== optimistic_info
)
2725 slot
= valid_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2729 return (*slot
)->result
;
2732 /* Insert PHI into the current hash table with a value number of
2736 vn_phi_insert (gimple phi
, tree result
)
2739 vn_phi_t vp1
= (vn_phi_t
) pool_alloc (current_info
->phis_pool
);
2741 vec
<tree
> args
= vNULL
;
2743 /* Canonicalize the SSA_NAME's to their value number. */
2744 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2746 tree def
= PHI_ARG_DEF (phi
, i
);
2747 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2748 args
.safe_push (def
);
2750 vp1
->value_id
= VN_INFO (result
)->value_id
;
2751 vp1
->type
= TREE_TYPE (gimple_phi_result (phi
));
2752 vp1
->phiargs
= args
;
2753 vp1
->block
= gimple_bb (phi
);
2754 vp1
->result
= result
;
2755 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
2757 slot
= current_info
->phis
->find_slot_with_hash (vp1
, vp1
->hashcode
, INSERT
);
2759 /* Because we iterate over phi operations more than once, it's
2760 possible the slot might already exist here, hence no assert.*/
2766 /* Print set of components in strongly connected component SCC to OUT. */
2769 print_scc (FILE *out
, vec
<tree
> scc
)
2774 fprintf (out
, "SCC consists of:");
2775 FOR_EACH_VEC_ELT (scc
, i
, var
)
2778 print_generic_expr (out
, var
, 0);
2780 fprintf (out
, "\n");
2783 /* Set the value number of FROM to TO, return true if it has changed
2787 set_ssa_val_to (tree from
, tree to
)
2789 tree currval
= SSA_VAL (from
);
2790 HOST_WIDE_INT toff
, coff
;
2792 /* The only thing we allow as value numbers are ssa_names
2793 and invariants. So assert that here. We don't allow VN_TOP
2794 as visiting a stmt should produce a value-number other than
2796 ??? Still VN_TOP can happen for unreachable code, so force
2797 it to varying in that case. Not all code is prepared to
2798 get VN_TOP on valueization. */
2801 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2802 fprintf (dump_file
, "Forcing value number to varying on "
2803 "receiving VN_TOP\n");
2807 gcc_assert (to
!= NULL_TREE
2808 && (TREE_CODE (to
) == SSA_NAME
2809 || is_gimple_min_invariant (to
)));
2813 if (currval
== from
)
2815 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2817 fprintf (dump_file
, "Not changing value number of ");
2818 print_generic_expr (dump_file
, from
, 0);
2819 fprintf (dump_file
, " from VARYING to ");
2820 print_generic_expr (dump_file
, to
, 0);
2821 fprintf (dump_file
, "\n");
2825 else if (TREE_CODE (to
) == SSA_NAME
2826 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
2830 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2832 fprintf (dump_file
, "Setting value number of ");
2833 print_generic_expr (dump_file
, from
, 0);
2834 fprintf (dump_file
, " to ");
2835 print_generic_expr (dump_file
, to
, 0);
2839 && !operand_equal_p (currval
, to
, 0)
2840 /* ??? For addresses involving volatile objects or types operand_equal_p
2841 does not reliably detect ADDR_EXPRs as equal. We know we are only
2842 getting invariant gimple addresses here, so can use
2843 get_addr_base_and_unit_offset to do this comparison. */
2844 && !(TREE_CODE (currval
) == ADDR_EXPR
2845 && TREE_CODE (to
) == ADDR_EXPR
2846 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval
, 0), &coff
)
2847 == get_addr_base_and_unit_offset (TREE_OPERAND (to
, 0), &toff
))
2850 VN_INFO (from
)->valnum
= to
;
2851 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2852 fprintf (dump_file
, " (changed)\n");
2855 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2856 fprintf (dump_file
, "\n");
2860 /* Mark as processed all the definitions in the defining stmt of USE, or
2864 mark_use_processed (tree use
)
2868 gimple stmt
= SSA_NAME_DEF_STMT (use
);
2870 if (SSA_NAME_IS_DEFAULT_DEF (use
) || gimple_code (stmt
) == GIMPLE_PHI
)
2872 VN_INFO (use
)->use_processed
= true;
2876 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2878 tree def
= DEF_FROM_PTR (defp
);
2880 VN_INFO (def
)->use_processed
= true;
2884 /* Set all definitions in STMT to value number to themselves.
2885 Return true if a value number changed. */
2888 defs_to_varying (gimple stmt
)
2890 bool changed
= false;
2894 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2896 tree def
= DEF_FROM_PTR (defp
);
2897 changed
|= set_ssa_val_to (def
, def
);
/* Forward declaration: expr_has_constants is defined further below but
   is already needed by the visit_* routines here.  */
2902 static bool expr_has_constants (tree expr
);
2904 /* Visit a copy between LHS and RHS, return true if the value number
2908 visit_copy (tree lhs
, tree rhs
)
2910 /* The copy may have a more interesting constant filled expression
2911 (we don't, since we know our RHS is just an SSA name). */
2912 VN_INFO (lhs
)->has_constants
= VN_INFO (rhs
)->has_constants
;
2913 VN_INFO (lhs
)->expr
= VN_INFO (rhs
)->expr
;
2915 /* And finally valueize. */
2916 rhs
= SSA_VAL (rhs
);
2918 return set_ssa_val_to (lhs
, rhs
);
2921 /* Visit a nary operator RHS, value number it, and return true if the
2922 value number of LHS has changed as a result. */
2925 visit_nary_op (tree lhs
, gimple stmt
)
2927 bool changed
= false;
2928 tree result
= vn_nary_op_lookup_stmt (stmt
, NULL
);
2931 changed
= set_ssa_val_to (lhs
, result
);
2934 changed
= set_ssa_val_to (lhs
, lhs
);
2935 vn_nary_op_insert_stmt (stmt
, lhs
);
2941 /* Visit a call STMT storing into LHS. Return true if the value number
2942 of the LHS has changed as a result. */
2945 visit_reference_op_call (tree lhs
, gcall
*stmt
)
2947 bool changed
= false;
2948 struct vn_reference_s vr1
;
2949 vn_reference_t vnresult
= NULL
;
2950 tree vdef
= gimple_vdef (stmt
);
2952 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2953 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
2956 vn_reference_lookup_call (stmt
, &vnresult
, &vr1
);
2959 if (vnresult
->result_vdef
&& vdef
)
2960 changed
|= set_ssa_val_to (vdef
, vnresult
->result_vdef
);
2962 if (!vnresult
->result
&& lhs
)
2963 vnresult
->result
= lhs
;
2965 if (vnresult
->result
&& lhs
)
2967 changed
|= set_ssa_val_to (lhs
, vnresult
->result
);
2969 if (VN_INFO (vnresult
->result
)->has_constants
)
2970 VN_INFO (lhs
)->has_constants
= true;
2976 vn_reference_s
**slot
;
2978 changed
|= set_ssa_val_to (vdef
, vdef
);
2980 changed
|= set_ssa_val_to (lhs
, lhs
);
2981 vr2
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
2982 vr2
->vuse
= vr1
.vuse
;
2983 /* As we are not walking the virtual operand chain we know the
2984 shared_lookup_references are still original so we can re-use
2986 vr2
->operands
= vr1
.operands
.copy ();
2987 vr2
->type
= vr1
.type
;
2989 vr2
->hashcode
= vr1
.hashcode
;
2991 vr2
->result_vdef
= vdef
;
2992 slot
= current_info
->references
->find_slot_with_hash (vr2
, vr2
->hashcode
,
2994 gcc_assert (!*slot
);
3001 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3002 and return true if the value number of the LHS has changed as a result. */
3005 visit_reference_op_load (tree lhs
, tree op
, gimple stmt
)
3007 bool changed
= false;
3011 last_vuse
= gimple_vuse (stmt
);
3012 last_vuse_ptr
= &last_vuse
;
3013 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
3014 default_vn_walk_kind
, NULL
);
3015 last_vuse_ptr
= NULL
;
3017 /* We handle type-punning through unions by value-numbering based
3018 on offset and size of the access. Be prepared to handle a
3019 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3021 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
3023 /* We will be setting the value number of lhs to the value number
3024 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3025 So first simplify and lookup this expression to see if it
3026 is already available. */
3027 tree val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (op
), result
);
3028 if ((CONVERT_EXPR_P (val
)
3029 || TREE_CODE (val
) == VIEW_CONVERT_EXPR
)
3030 && TREE_CODE (TREE_OPERAND (val
, 0)) == SSA_NAME
)
3032 tree tem
= vn_get_expr_for (TREE_OPERAND (val
, 0));
3033 if ((CONVERT_EXPR_P (tem
)
3034 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
)
3035 && (tem
= fold_unary_ignore_overflow (TREE_CODE (val
),
3036 TREE_TYPE (val
), tem
)))
3040 if (!is_gimple_min_invariant (val
)
3041 && TREE_CODE (val
) != SSA_NAME
)
3042 result
= vn_nary_op_lookup (val
, NULL
);
3043 /* If the expression is not yet available, value-number lhs to
3044 a new SSA_NAME we create. */
3047 result
= make_temp_ssa_name (TREE_TYPE (lhs
), gimple_build_nop (),
3049 /* Initialize value-number information properly. */
3050 VN_INFO_GET (result
)->valnum
= result
;
3051 VN_INFO (result
)->value_id
= get_next_value_id ();
3052 VN_INFO (result
)->expr
= val
;
3053 VN_INFO (result
)->has_constants
= expr_has_constants (val
);
3054 VN_INFO (result
)->needs_insertion
= true;
3055 /* As all "inserted" statements are singleton SCCs, insert
3056 to the valid table. This is strictly needed to
3057 avoid re-generating new value SSA_NAMEs for the same
3058 expression during SCC iteration over and over (the
3059 optimistic table gets cleared after each iteration).
3060 We do not need to insert into the optimistic table, as
3061 lookups there will fall back to the valid table. */
3062 if (current_info
== optimistic_info
)
3064 current_info
= valid_info
;
3065 vn_nary_op_insert (val
, result
);
3066 current_info
= optimistic_info
;
3069 vn_nary_op_insert (val
, result
);
3070 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3072 fprintf (dump_file
, "Inserting name ");
3073 print_generic_expr (dump_file
, result
, 0);
3074 fprintf (dump_file
, " for expression ");
3075 print_generic_expr (dump_file
, val
, 0);
3076 fprintf (dump_file
, "\n");
3083 changed
= set_ssa_val_to (lhs
, result
);
3084 if (TREE_CODE (result
) == SSA_NAME
3085 && VN_INFO (result
)->has_constants
)
3087 VN_INFO (lhs
)->expr
= VN_INFO (result
)->expr
;
3088 VN_INFO (lhs
)->has_constants
= true;
3093 changed
= set_ssa_val_to (lhs
, lhs
);
3094 vn_reference_insert (op
, lhs
, last_vuse
, NULL_TREE
);
3101 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3102 and return true if the value number of the LHS has changed as a result. */
3105 visit_reference_op_store (tree lhs
, tree op
, gimple stmt
)
3107 bool changed
= false;
3108 vn_reference_t vnresult
= NULL
;
3109 tree result
, assign
;
3110 bool resultsame
= false;
3111 tree vuse
= gimple_vuse (stmt
);
3112 tree vdef
= gimple_vdef (stmt
);
3114 /* First we want to lookup using the *vuses* from the store and see
3115 if there the last store to this location with the same address
3118 The vuses represent the memory state before the store. If the
3119 memory state, address, and value of the store is the same as the
3120 last store to this location, then this store will produce the
3121 same memory state as that store.
3123 In this case the vdef versions for this store are value numbered to those
3124 vuse versions, since they represent the same memory state after
3127 Otherwise, the vdefs for the store are used when inserting into
3128 the table, since the store generates a new memory state. */
3130 result
= vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, NULL
);
3134 if (TREE_CODE (result
) == SSA_NAME
)
3135 result
= SSA_VAL (result
);
3136 if (TREE_CODE (op
) == SSA_NAME
)
3138 resultsame
= expressions_equal_p (result
, op
);
3141 if ((!result
|| !resultsame
)
3142 /* Only perform the following when being called from PRE
3143 which embeds tail merging. */
3144 && default_vn_walk_kind
== VN_WALK
)
3146 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3147 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
);
3150 VN_INFO (vdef
)->use_processed
= true;
3151 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3155 if (!result
|| !resultsame
)
3157 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3159 fprintf (dump_file
, "No store match\n");
3160 fprintf (dump_file
, "Value numbering store ");
3161 print_generic_expr (dump_file
, lhs
, 0);
3162 fprintf (dump_file
, " to ");
3163 print_generic_expr (dump_file
, op
, 0);
3164 fprintf (dump_file
, "\n");
3166 /* Have to set value numbers before insert, since insert is
3167 going to valueize the references in-place. */
3170 changed
|= set_ssa_val_to (vdef
, vdef
);
3173 /* Do not insert structure copies into the tables. */
3174 if (is_gimple_min_invariant (op
)
3175 || is_gimple_reg (op
))
3176 vn_reference_insert (lhs
, op
, vdef
, NULL
);
3178 /* Only perform the following when being called from PRE
3179 which embeds tail merging. */
3180 if (default_vn_walk_kind
== VN_WALK
)
3182 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3183 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
3188 /* We had a match, so value number the vdef to have the value
3189 number of the vuse it came from. */
3191 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3192 fprintf (dump_file
, "Store matched earlier value,"
3193 "value numbering store vdefs to matching vuses.\n");
3195 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
3201 /* Visit and value number PHI, return true if the value number
3205 visit_phi (gimple phi
)
3207 bool changed
= false;
3209 tree sameval
= VN_TOP
;
3210 bool allsame
= true;
3212 /* TODO: We could check for this in init_sccvn, and replace this
3213 with a gcc_assert. */
3214 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
3215 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3217 /* See if all non-TOP arguments have the same value. TOP is
3218 equivalent to everything, so we can ignore it. */
3221 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3222 if (e
->flags
& EDGE_EXECUTABLE
)
3224 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3226 if (TREE_CODE (def
) == SSA_NAME
)
3227 def
= SSA_VAL (def
);
3230 if (sameval
== VN_TOP
)
3236 if (!expressions_equal_p (def
, sameval
))
3244 /* If all value numbered to the same value, the phi node has that
3247 return set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3249 /* Otherwise, see if it is equivalent to a phi node in this block. */
3250 result
= vn_phi_lookup (phi
);
3252 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
3255 vn_phi_insert (phi
, PHI_RESULT (phi
));
3256 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
3257 VN_INFO (PHI_RESULT (phi
))->expr
= PHI_RESULT (phi
);
3258 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3264 /* Return true if EXPR contains constants. */
3267 expr_has_constants (tree expr
)
3269 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
3272 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0));
3275 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0))
3276 || is_gimple_min_invariant (TREE_OPERAND (expr
, 1));
3277 /* Constants inside reference ops are rarely interesting, but
3278 it can take a lot of looking to find them. */
3280 case tcc_declaration
:
3283 return is_gimple_min_invariant (expr
);
3288 /* Return true if STMT contains constants. */
3291 stmt_has_constants (gimple stmt
)
3295 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
3298 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
)))
3300 case GIMPLE_TERNARY_RHS
:
3301 tem
= gimple_assign_rhs3 (stmt
);
3302 if (TREE_CODE (tem
) == SSA_NAME
)
3303 tem
= SSA_VAL (tem
);
3304 if (is_gimple_min_invariant (tem
))
3308 case GIMPLE_BINARY_RHS
:
3309 tem
= gimple_assign_rhs2 (stmt
);
3310 if (TREE_CODE (tem
) == SSA_NAME
)
3311 tem
= SSA_VAL (tem
);
3312 if (is_gimple_min_invariant (tem
))
3316 case GIMPLE_SINGLE_RHS
:
3317 /* Constants inside reference ops are rarely interesting, but
3318 it can take a lot of looking to find them. */
3319 case GIMPLE_UNARY_RHS
:
3320 tem
= gimple_assign_rhs1 (stmt
);
3321 if (TREE_CODE (tem
) == SSA_NAME
)
3322 tem
= SSA_VAL (tem
);
3323 return is_gimple_min_invariant (tem
);
3331 /* Simplify the binary expression RHS, and return the result if
3335 simplify_binary_expression (gimple stmt
)
3337 tree result
= NULL_TREE
;
3338 tree op0
= gimple_assign_rhs1 (stmt
);
3339 tree op1
= gimple_assign_rhs2 (stmt
);
3340 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3342 /* This will not catch every single case we could combine, but will
3343 catch those with constants. The goal here is to simultaneously
3344 combine constants between expressions, but avoid infinite
3345 expansion of expressions during simplification. */
3346 op0
= vn_valueize (op0
);
3347 if (TREE_CODE (op0
) == SSA_NAME
3348 && (VN_INFO (op0
)->has_constants
3349 || TREE_CODE_CLASS (code
) == tcc_comparison
3350 || code
== COMPLEX_EXPR
))
3351 op0
= vn_get_expr_for (op0
);
3353 op1
= vn_valueize (op1
);
3354 if (TREE_CODE (op1
) == SSA_NAME
3355 && (VN_INFO (op1
)->has_constants
3356 || code
== COMPLEX_EXPR
))
3357 op1
= vn_get_expr_for (op1
);
3359 /* Pointer plus constant can be represented as invariant address.
3360 Do so to allow further propatation, see also tree forwprop. */
3361 if (code
== POINTER_PLUS_EXPR
3362 && tree_fits_uhwi_p (op1
)
3363 && TREE_CODE (op0
) == ADDR_EXPR
3364 && is_gimple_min_invariant (op0
))
3365 return build_invariant_address (TREE_TYPE (op0
),
3366 TREE_OPERAND (op0
, 0),
3367 tree_to_uhwi (op1
));
3369 /* Avoid folding if nothing changed. */
3370 if (op0
== gimple_assign_rhs1 (stmt
)
3371 && op1
== gimple_assign_rhs2 (stmt
))
3374 fold_defer_overflow_warnings ();
3376 result
= fold_binary (code
, gimple_expr_type (stmt
), op0
, op1
);
3378 STRIP_USELESS_TYPE_CONVERSION (result
);
3380 fold_undefer_overflow_warnings (result
&& valid_gimple_rhs_p (result
),
3383 /* Make sure result is not a complex expression consisting
3384 of operators of operators (IE (a + b) + (a + c))
3385 Otherwise, we will end up with unbounded expressions if
3386 fold does anything at all. */
3387 if (result
&& valid_gimple_rhs_p (result
))
3393 /* Simplify the unary expression RHS, and return the result if
3397 simplify_unary_expression (gassign
*stmt
)
3399 tree result
= NULL_TREE
;
3400 tree orig_op0
, op0
= gimple_assign_rhs1 (stmt
);
3401 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3403 /* We handle some tcc_reference codes here that are all
3404 GIMPLE_ASSIGN_SINGLE codes. */
3405 if (code
== REALPART_EXPR
3406 || code
== IMAGPART_EXPR
3407 || code
== VIEW_CONVERT_EXPR
3408 || code
== BIT_FIELD_REF
)
3409 op0
= TREE_OPERAND (op0
, 0);
3412 op0
= vn_valueize (op0
);
3413 if (TREE_CODE (op0
) == SSA_NAME
)
3415 if (VN_INFO (op0
)->has_constants
)
3416 op0
= vn_get_expr_for (op0
);
3417 else if (CONVERT_EXPR_CODE_P (code
)
3418 || code
== REALPART_EXPR
3419 || code
== IMAGPART_EXPR
3420 || code
== VIEW_CONVERT_EXPR
3421 || code
== BIT_FIELD_REF
)
3423 /* We want to do tree-combining on conversion-like expressions.
3424 Make sure we feed only SSA_NAMEs or constants to fold though. */
3425 tree tem
= vn_get_expr_for (op0
);
3426 if (UNARY_CLASS_P (tem
)
3427 || BINARY_CLASS_P (tem
)
3428 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
3429 || TREE_CODE (tem
) == SSA_NAME
3430 || TREE_CODE (tem
) == CONSTRUCTOR
3431 || is_gimple_min_invariant (tem
))
3436 /* Avoid folding if nothing changed, but remember the expression. */
3437 if (op0
== orig_op0
)
3440 if (code
== BIT_FIELD_REF
)
3442 tree rhs
= gimple_assign_rhs1 (stmt
);
3443 result
= fold_ternary (BIT_FIELD_REF
, TREE_TYPE (rhs
),
3444 op0
, TREE_OPERAND (rhs
, 1), TREE_OPERAND (rhs
, 2));
3447 result
= fold_unary_ignore_overflow (code
, gimple_expr_type (stmt
), op0
);
3450 STRIP_USELESS_TYPE_CONVERSION (result
);
3451 if (valid_gimple_rhs_p (result
))
3458 /* Try to simplify RHS using equivalences and constant folding. */
3461 try_to_simplify (gassign
*stmt
)
3463 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3466 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3467 in this case, there is no point in doing extra work. */
3468 if (code
== SSA_NAME
)
3471 /* First try constant folding based on our current lattice. */
3472 tem
= gimple_fold_stmt_to_constant_1 (stmt
, vn_valueize
, vn_valueize
);
3474 && (TREE_CODE (tem
) == SSA_NAME
3475 || is_gimple_min_invariant (tem
)))
3478 /* If that didn't work try combining multiple statements. */
3479 switch (TREE_CODE_CLASS (code
))
3482 /* Fallthrough for some unary codes that can operate on registers. */
3483 if (!(code
== REALPART_EXPR
3484 || code
== IMAGPART_EXPR
3485 || code
== VIEW_CONVERT_EXPR
3486 || code
== BIT_FIELD_REF
))
3488 /* We could do a little more with unary ops, if they expand
3489 into binary ops, but it's debatable whether it is worth it. */
3491 return simplify_unary_expression (stmt
);
3493 case tcc_comparison
:
3495 return simplify_binary_expression (stmt
);
3504 /* Visit and value number USE, return true if the value number
3508 visit_use (tree use
)
3510 bool changed
= false;
3511 gimple stmt
= SSA_NAME_DEF_STMT (use
);
3513 mark_use_processed (use
);
3515 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3516 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3517 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3519 fprintf (dump_file
, "Value numbering ");
3520 print_generic_expr (dump_file
, use
, 0);
3521 fprintf (dump_file
, " stmt = ");
3522 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3525 /* Handle uninitialized uses. */
3526 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3527 changed
= set_ssa_val_to (use
, use
);
3530 if (gimple_code (stmt
) == GIMPLE_PHI
)
3531 changed
= visit_phi (stmt
);
3532 else if (gimple_has_volatile_ops (stmt
))
3533 changed
= defs_to_varying (stmt
);
3534 else if (is_gimple_assign (stmt
))
3536 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3537 tree lhs
= gimple_assign_lhs (stmt
);
3538 tree rhs1
= gimple_assign_rhs1 (stmt
);
3541 /* Shortcut for copies. Simplifying copies is pointless,
3542 since we copy the expression and value they represent. */
3543 if (code
== SSA_NAME
3544 && TREE_CODE (lhs
) == SSA_NAME
)
3546 changed
= visit_copy (lhs
, rhs1
);
3549 simplified
= try_to_simplify (as_a
<gassign
*> (stmt
));
3552 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3554 fprintf (dump_file
, "RHS ");
3555 print_gimple_expr (dump_file
, stmt
, 0, 0);
3556 fprintf (dump_file
, " simplified to ");
3557 print_generic_expr (dump_file
, simplified
, 0);
3558 if (TREE_CODE (lhs
) == SSA_NAME
)
3559 fprintf (dump_file
, " has constants %d\n",
3560 expr_has_constants (simplified
));
3562 fprintf (dump_file
, "\n");
3565 /* Setting value numbers to constants will occasionally
3566 screw up phi congruence because constants are not
3567 uniquely associated with a single ssa name that can be
3570 && is_gimple_min_invariant (simplified
)
3571 && TREE_CODE (lhs
) == SSA_NAME
)
3573 VN_INFO (lhs
)->expr
= simplified
;
3574 VN_INFO (lhs
)->has_constants
= true;
3575 changed
= set_ssa_val_to (lhs
, simplified
);
3579 && TREE_CODE (simplified
) == SSA_NAME
3580 && TREE_CODE (lhs
) == SSA_NAME
)
3582 changed
= visit_copy (lhs
, simplified
);
3585 else if (simplified
)
3587 if (TREE_CODE (lhs
) == SSA_NAME
)
3589 VN_INFO (lhs
)->has_constants
= expr_has_constants (simplified
);
3590 /* We have to unshare the expression or else
3591 valuizing may change the IL stream. */
3592 VN_INFO (lhs
)->expr
= unshare_expr (simplified
);
3595 else if (stmt_has_constants (stmt
)
3596 && TREE_CODE (lhs
) == SSA_NAME
)
3597 VN_INFO (lhs
)->has_constants
= true;
3598 else if (TREE_CODE (lhs
) == SSA_NAME
)
3600 /* We reset expr and constantness here because we may
3601 have been value numbering optimistically, and
3602 iterating. They may become non-constant in this case,
3603 even if they were optimistically constant. */
3605 VN_INFO (lhs
)->has_constants
= false;
3606 VN_INFO (lhs
)->expr
= NULL_TREE
;
3609 if ((TREE_CODE (lhs
) == SSA_NAME
3610 /* We can substitute SSA_NAMEs that are live over
3611 abnormal edges with their constant value. */
3612 && !(gimple_assign_copy_p (stmt
)
3613 && is_gimple_min_invariant (rhs1
))
3615 && is_gimple_min_invariant (simplified
))
3616 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3617 /* Stores or copies from SSA_NAMEs that are live over
3618 abnormal edges are a problem. */
3619 || (code
== SSA_NAME
3620 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
3621 changed
= defs_to_varying (stmt
);
3622 else if (REFERENCE_CLASS_P (lhs
)
3624 changed
= visit_reference_op_store (lhs
, rhs1
, stmt
);
3625 else if (TREE_CODE (lhs
) == SSA_NAME
)
3627 if ((gimple_assign_copy_p (stmt
)
3628 && is_gimple_min_invariant (rhs1
))
3630 && is_gimple_min_invariant (simplified
)))
3632 VN_INFO (lhs
)->has_constants
= true;
3634 changed
= set_ssa_val_to (lhs
, simplified
);
3636 changed
= set_ssa_val_to (lhs
, rhs1
);
3640 /* First try to lookup the simplified expression. */
3643 enum gimple_rhs_class rhs_class
;
3646 rhs_class
= get_gimple_rhs_class (TREE_CODE (simplified
));
3647 if ((rhs_class
== GIMPLE_UNARY_RHS
3648 || rhs_class
== GIMPLE_BINARY_RHS
3649 || rhs_class
== GIMPLE_TERNARY_RHS
)
3650 && valid_gimple_rhs_p (simplified
))
3652 tree result
= vn_nary_op_lookup (simplified
, NULL
);
3655 changed
= set_ssa_val_to (lhs
, result
);
3661 /* Otherwise visit the original statement. */
3662 switch (vn_get_stmt_kind (stmt
))
3665 changed
= visit_nary_op (lhs
, stmt
);
3668 changed
= visit_reference_op_load (lhs
, rhs1
, stmt
);
3671 changed
= defs_to_varying (stmt
);
3677 changed
= defs_to_varying (stmt
);
3679 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
3681 tree lhs
= gimple_call_lhs (stmt
);
3682 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3684 /* Try constant folding based on our current lattice. */
3685 tree simplified
= gimple_fold_stmt_to_constant_1 (stmt
,
3689 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3691 fprintf (dump_file
, "call ");
3692 print_gimple_expr (dump_file
, stmt
, 0, 0);
3693 fprintf (dump_file
, " simplified to ");
3694 print_generic_expr (dump_file
, simplified
, 0);
3695 if (TREE_CODE (lhs
) == SSA_NAME
)
3696 fprintf (dump_file
, " has constants %d\n",
3697 expr_has_constants (simplified
));
3699 fprintf (dump_file
, "\n");
3702 /* Setting value numbers to constants will occasionally
3703 screw up phi congruence because constants are not
3704 uniquely associated with a single ssa name that can be
3707 && is_gimple_min_invariant (simplified
))
3709 VN_INFO (lhs
)->expr
= simplified
;
3710 VN_INFO (lhs
)->has_constants
= true;
3711 changed
= set_ssa_val_to (lhs
, simplified
);
3712 if (gimple_vdef (stmt
))
3713 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3714 gimple_vuse (stmt
));
3718 && TREE_CODE (simplified
) == SSA_NAME
)
3720 changed
= visit_copy (lhs
, simplified
);
3721 if (gimple_vdef (stmt
))
3722 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3723 gimple_vuse (stmt
));
3728 if (stmt_has_constants (stmt
))
3729 VN_INFO (lhs
)->has_constants
= true;
3732 /* We reset expr and constantness here because we may
3733 have been value numbering optimistically, and
3734 iterating. They may become non-constant in this case,
3735 even if they were optimistically constant. */
3736 VN_INFO (lhs
)->has_constants
= false;
3737 VN_INFO (lhs
)->expr
= NULL_TREE
;
3740 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3742 changed
= defs_to_varying (stmt
);
3748 if (!gimple_call_internal_p (stmt
)
3749 && (/* Calls to the same function with the same vuse
3750 and the same operands do not necessarily return the same
3751 value, unless they're pure or const. */
3752 gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
)
3753 /* If calls have a vdef, subsequent calls won't have
3754 the same incoming vuse. So, if 2 calls with vdef have the
3755 same vuse, we know they're not subsequent.
3756 We can value number 2 calls to the same function with the
3757 same vuse and the same operands which are not subsequent
3758 the same, because there is no code in the program that can
3759 compare the 2 values... */
3760 || (gimple_vdef (stmt
)
3761 /* ... unless the call returns a pointer which does
3762 not alias with anything else. In which case the
3763 information that the values are distinct are encoded
3765 && !(gimple_call_return_flags (call_stmt
) & ERF_NOALIAS
)
3766 /* Only perform the following when being called from PRE
3767 which embeds tail merging. */
3768 && default_vn_walk_kind
== VN_WALK
)))
3769 changed
= visit_reference_op_call (lhs
, call_stmt
);
3771 changed
= defs_to_varying (stmt
);
3774 changed
= defs_to_varying (stmt
);
3780 /* Compare two operands by reverse postorder index */
3783 compare_ops (const void *pa
, const void *pb
)
3785 const tree opa
= *((const tree
*)pa
);
3786 const tree opb
= *((const tree
*)pb
);
3787 gimple opstmta
= SSA_NAME_DEF_STMT (opa
);
3788 gimple opstmtb
= SSA_NAME_DEF_STMT (opb
);
3792 if (gimple_nop_p (opstmta
) && gimple_nop_p (opstmtb
))
3793 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3794 else if (gimple_nop_p (opstmta
))
3796 else if (gimple_nop_p (opstmtb
))
3799 bba
= gimple_bb (opstmta
);
3800 bbb
= gimple_bb (opstmtb
);
3803 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3811 if (gimple_code (opstmta
) == GIMPLE_PHI
3812 && gimple_code (opstmtb
) == GIMPLE_PHI
)
3813 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3814 else if (gimple_code (opstmta
) == GIMPLE_PHI
)
3816 else if (gimple_code (opstmtb
) == GIMPLE_PHI
)
3818 else if (gimple_uid (opstmta
) != gimple_uid (opstmtb
))
3819 return gimple_uid (opstmta
) - gimple_uid (opstmtb
);
3821 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3823 return rpo_numbers
[bba
->index
] - rpo_numbers
[bbb
->index
];
3826 /* Sort an array containing members of a strongly connected component
3827 SCC so that the members are ordered by RPO number.
3828 This means that when the sort is complete, iterating through the
3829 array will give you the members in RPO order. */
3832 sort_scc (vec
<tree
> scc
)
3834 scc
.qsort (compare_ops
);
3837 /* Insert the no longer used nary ONARY to the hash INFO. */
3840 copy_nary (vn_nary_op_t onary
, vn_tables_t info
)
3842 size_t size
= sizeof_vn_nary_op (onary
->length
);
3843 vn_nary_op_t nary
= alloc_vn_nary_op_noinit (onary
->length
,
3844 &info
->nary_obstack
);
3845 memcpy (nary
, onary
, size
);
3846 vn_nary_op_insert_into (nary
, info
->nary
, false);
3849 /* Insert the no longer used phi OPHI to the hash INFO. */
3852 copy_phi (vn_phi_t ophi
, vn_tables_t info
)
3854 vn_phi_t phi
= (vn_phi_t
) pool_alloc (info
->phis_pool
);
3856 memcpy (phi
, ophi
, sizeof (*phi
));
3857 ophi
->phiargs
.create (0);
3858 slot
= info
->phis
->find_slot_with_hash (phi
, phi
->hashcode
, INSERT
);
3859 gcc_assert (!*slot
);
3863 /* Insert the no longer used reference OREF to the hash INFO. */
3866 copy_reference (vn_reference_t oref
, vn_tables_t info
)
3869 vn_reference_s
**slot
;
/* Allocate from INFO's reference pool and bit-copy OREF; the copy takes
   over OREF's operands vector.  */
3870 ref
= (vn_reference_t
) pool_alloc (info
->references_pool
);
3871 memcpy (ref
, oref
, sizeof (*ref
));
/* Reset OREF's operands so the vector is owned solely by the copy.  */
3872 oref
->operands
.create (0);
3873 slot
= info
->references
->find_slot_with_hash (ref
, ref
->hashcode
, INSERT
);
/* Unlike copy_phi, an existing entry may be present here: free it before
   (presumably) storing REF into the slot — the store is not visible in
   this chunk, confirm against the full source.  */
3875 free_reference (*slot
);
3879 /* Process a strongly connected component in the SSA graph. */
3882 process_scc (vec
<tree
> scc
)
3886 unsigned int iterations
= 0;
3887 bool changed
= true;
3888 vn_nary_op_iterator_type hin
;
3889 vn_phi_iterator_type hip
;
3890 vn_reference_iterator_type hir
;
3895 /* If the SCC has a single member, just visit it. */
3896 if (scc
.length () == 1)
3899 if (VN_INFO (use
)->use_processed
)
3901 /* We need to make sure it doesn't form a cycle itself, which can
3902 happen for self-referential PHI nodes. In that case we would
3903 end up inserting an expression with VN_TOP operands into the
3904 valid table which makes us derive bogus equivalences later.
3905 The cheapest way to check this is to assume it for all PHI nodes. */
3906 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
3907 /* Fallthru to iteration. */ ;
3915 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3916 print_scc (dump_file
, scc
);
3918 /* Iterate over the SCC with the optimistic table until it stops
3920 current_info
= optimistic_info
;
/* Fixpoint loop: CHANGED starts true; each pass re-visits every member
   and ORs visit_use results into CHANGED, so iteration continues until a
   whole pass makes no change (loop header not visible in this chunk).  */
3925 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3926 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
3927 /* As we are value-numbering optimistically we have to
3928 clear the expression tables and the simplified expressions
3929 in each iteration until we converge. */
3930 optimistic_info
->nary
->empty ();
3931 optimistic_info
->phis
->empty ();
3932 optimistic_info
->references
->empty ();
/* Release and re-init the obstack / pools wholesale — cheaper than
   freeing entries one by one.  */
3933 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
3934 gcc_obstack_init (&optimistic_info
->nary_obstack
);
3935 empty_alloc_pool (optimistic_info
->phis_pool
);
3936 empty_alloc_pool (optimistic_info
->references_pool
);
/* Also drop cached simplified expressions for every SCC member.  */
3937 FOR_EACH_VEC_ELT (scc
, i
, var
)
3938 VN_INFO (var
)->expr
= NULL_TREE
;
3939 FOR_EACH_VEC_ELT (scc
, i
, var
)
3940 changed
|= visit_use (var
);
3943 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3944 fprintf (dump_file
, "Processing SCC needed %d iterations\n", iterations
);
3945 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
3947 /* Finally, copy the contents of the no longer used optimistic
3948 table to the valid table. */
3949 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->nary
, nary
, vn_nary_op_t
, hin
)
3950 copy_nary (nary
, valid_info
);
3951 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->phis
, phi
, vn_phi_t
, hip
)
3952 copy_phi (phi
, valid_info
);
3953 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->references
,
3954 ref
, vn_reference_t
, hir
)
3955 copy_reference (ref
, valid_info
);
/* From here on lookups/inserts go to the valid table again.  */
3957 current_info
= valid_info
;
3961 /* Pop the components of the found SCC for NAME off the SCC stack
3962 and process them. Returns true if all went well, false if
3963 we run into resource limits. */
3966 extract_and_process_scc_for_name (tree name
)
3971 /* Found an SCC, pop the components off the SCC stack and
/* NAME is the SCC root: everything above it on sccstack (inclusive)
   belongs to this component.  */
3975 x
= sccstack
.pop ();
3977 VN_INFO (x
)->on_sccstack
= false;
3979 } while (x
!= name
);
3981 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3983 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
3986 fprintf (dump_file
, "WARNING: Giving up with SCCVN due to "
3987 "SCC size %u exceeding %u\n", scc
.length (),
3988 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
/* Multi-member SCCs presumably get sorted into RPO order (sort_scc)
   before process_scc — the calls themselves are not visible in this
   chunk; confirm against the full source.  */
3993 if (scc
.length () > 1)
4001 /* Depth first search on NAME to discover and process SCC's in the SSA
4003 Execution of this algorithm relies on the fact that the SCC's are
4004 popped off the stack in topological order.
4005 Returns true if successful, false if we stopped processing SCC's due
4006 to resource constraints. */
/* Iterative (explicit-stack) Tarjan-style SCC walk: NAMEVEC/ITERVEC
   model the recursion stack, pairing the name being processed with its
   saved use-operand iterator.  */
4011 vec
<ssa_op_iter
> itervec
= vNULL
;
4012 vec
<tree
> namevec
= vNULL
;
4013 use_operand_p usep
= NULL
;
/* "Recursive call" entry: assign a DFS number and initialize the
   lowlink to it.  */
4020 VN_INFO (name
)->dfsnum
= next_dfs_num
++;
4021 VN_INFO (name
)->visited
= true;
4022 VN_INFO (name
)->low
= VN_INFO (name
)->dfsnum
;
4024 sccstack
.safe_push (name
);
4025 VN_INFO (name
)->on_sccstack
= true;
4026 defstmt
= SSA_NAME_DEF_STMT (name
);
4028 /* Recursively DFS on our operands, looking for SCC's. */
4029 if (!gimple_nop_p (defstmt
))
4031 /* Push a new iterator. */
4032 if (gphi
*phi
= dyn_cast
<gphi
*> (defstmt
))
4033 usep
= op_iter_init_phiuse (&iter
, phi
, SSA_OP_ALL_USES
);
4035 usep
= op_iter_init_use (&iter
, defstmt
, SSA_OP_ALL_USES
);
/* Default-def (nop) statements have no uses to iterate.  */
4038 clear_and_done_ssa_iter (&iter
);
4042 /* If we are done processing uses of a name, go up the stack
4043 of iterators and process SCCs as we found them. */
4044 if (op_iter_done (&iter
))
4046 /* See if we found an SCC. */
4047 if (VN_INFO (name
)->low
== VN_INFO (name
)->dfsnum
)
4048 if (!extract_and_process_scc_for_name (name
))
4055 /* Check if we are done. */
4056 if (namevec
.is_empty ())
4063 /* Restore the last use walker and continue walking there. */
4065 name
= namevec
.pop ();
4066 memcpy (&iter
, &itervec
.last (),
4067 sizeof (ssa_op_iter
))
4069 goto continue_walking
;
4072 use
= USE_FROM_PTR (usep
);
4074 /* Since we handle phi nodes, we will sometimes get
4075 invariants in the use expression. */
4076 if (TREE_CODE (use
) == SSA_NAME
)
4078 if (! (VN_INFO (use
)->visited
))
4080 /* Recurse by pushing the current use walking state on
4081 the stack and starting over. */
4082 itervec
.safe_push (iter
);
4083 namevec
.safe_push (name
);
/* "Return from recursion": propagate the child's lowlink upward.  */
4088 VN_INFO (name
)->low
= MIN (VN_INFO (name
)->low
,
4089 VN_INFO (use
)->low
);
/* Back-edge to something still on the SCC stack: lower our lowlink to
   the target's DFS number.  */
4091 if (VN_INFO (use
)->dfsnum
< VN_INFO (name
)->dfsnum
4092 && VN_INFO (use
)->on_sccstack
)
4094 VN_INFO (name
)->low
= MIN (VN_INFO (use
)->dfsnum
,
4095 VN_INFO (name
)->low
);
4099 usep
= op_iter_next_use (&iter
);
4103 /* Allocate a value number table. */
4106 allocate_vn_table (vn_tables_t table
)
/* 23 is just the initial bucket count for the three hash tables.  */
4108 table
->phis
= new vn_phi_table_type (23);
4109 table
->nary
= new vn_nary_op_table_type (23);
4110 table
->references
= new vn_reference_table_type (23);
/* N-ary entries are variable-sized, so they live on an obstack rather
   than a fixed-size alloc pool.  */
4112 gcc_obstack_init (&table
->nary_obstack
);
4113 table
->phis_pool
= create_alloc_pool ("VN phis",
4114 sizeof (struct vn_phi_s
),
4116 table
->references_pool
= create_alloc_pool ("VN references",
4117 sizeof (struct vn_reference_s
),
4121 /* Free a value number table. */
4124 free_vn_table (vn_tables_t table
)
/* NOTE(review): the deletions of table->phis and table->nary are not
   visible in this chunk; presumably they precede this — confirm against
   the full source.  */
4130 delete table
->references
;
4131 table
->references
= NULL
;
/* Releasing the obstack / pools frees all entries at once.  */
4132 obstack_free (&table
->nary_obstack
, NULL
);
4133 free_alloc_pool (table
->phis_pool
);
4134 free_alloc_pool (table
->references_pool
);
/* NOTE(review): the function header is missing from this chunk; this is
   the body of the SCCVN initializer — it sets up dominators, the SCC
   stack, constant ids, per-SSA-name aux info, RPO numbers, VN_TOP and
   the valid/optimistic tables.  */
4142 int *rpo_numbers_temp
;
4144 calculate_dominance_info (CDI_DOMINATORS
);
4145 sccstack
.create (0);
4146 constant_to_value_id
= new hash_table
<vn_constant_hasher
> (23);
4148 constant_value_ids
= BITMAP_ALLOC (NULL
);
4153 vn_ssa_aux_table
.create (num_ssa_names
+ 1);
4154 /* VEC_alloc doesn't actually grow it to the right size, it just
4155 preallocates the space to do so. */
4156 vn_ssa_aux_table
.safe_grow_cleared (num_ssa_names
+ 1);
4157 gcc_obstack_init (&vn_ssa_aux_obstack
);
4159 shared_lookup_phiargs
.create (0);
4160 shared_lookup_references
.create (0);
4161 rpo_numbers
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
4163 XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
4164 pre_and_rev_post_order_compute (NULL
, rpo_numbers_temp
, false);
4166 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4167 the i'th block in RPO order is bb. We want to map bb's to RPO
4168 numbers, so we need to rearrange this array. */
4169 for (j
= 0; j
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; j
++)
4170 rpo_numbers
[rpo_numbers_temp
[j
]] = j
;
4172 XDELETE (rpo_numbers_temp
);
/* VN_TOP is a distinguished "unknown/optimistic" value; a raw void
   temporary serves as the unique sentinel tree.  */
4174 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
4176 /* Create the VN_INFO structures, and initialize value numbers to
4178 for (i
= 0; i
< num_ssa_names
; i
++)
4180 tree name
= ssa_name (i
);
4183 VN_INFO_GET (name
)->valnum
= VN_TOP
;
4184 VN_INFO (name
)->expr
= NULL_TREE
;
4185 VN_INFO (name
)->value_id
= 0;
/* Give statements UIDs usable by the comparator (gimple_uid).  */
4189 renumber_gimple_stmt_uids ();
4191 /* Create the valid and optimistic value numbering tables. */
4192 valid_info
= XCNEW (struct vn_tables_s
);
4193 allocate_vn_table (valid_info
);
4194 optimistic_info
= XCNEW (struct vn_tables_s
);
4195 allocate_vn_table (optimistic_info
);
/* NOTE(review): function header missing from this chunk; this is the
   body of the SCCVN teardown, mirroring the initializer above.  */
4203 delete constant_to_value_id
;
4204 constant_to_value_id
= NULL
;
4205 BITMAP_FREE (constant_value_ids
);
4206 shared_lookup_phiargs
.release ();
4207 shared_lookup_references
.release ();
4208 XDELETEVEC (rpo_numbers
);
/* Release SSA names that VN created only for internal bookkeeping.  */
4210 for (i
= 0; i
< num_ssa_names
; i
++)
4212 tree name
= ssa_name (i
);
4214 && VN_INFO (name
)->needs_insertion
)
4215 release_ssa_name (name
);
/* One obstack free drops every vn_ssa_aux record at once.  */
4217 obstack_free (&vn_ssa_aux_obstack
, NULL
);
4218 vn_ssa_aux_table
.release ();
4220 sccstack
.release ();
4221 free_vn_table (valid_info
);
4222 XDELETE (valid_info
);
4223 free_vn_table (optimistic_info
);
4224 XDELETE (optimistic_info
);
4227 /* Set *ID according to RESULT. */
4230 set_value_id_for_result (tree result
, unsigned int *id
)
/* SSA name: reuse its computed value id.  */
4232 if (result
&& TREE_CODE (result
) == SSA_NAME
)
4233 *id
= VN_INFO (result
)->value_id
;
/* Constant: use the shared constant-value-id table.  */
4234 else if (result
&& is_gimple_min_invariant (result
))
4235 *id
= get_or_alloc_constant_value_id (result
);
/* Otherwise (including null RESULT) allocate a fresh unique id.  */
4237 *id
= get_next_value_id ();
4240 /* Set the value ids in the valid hash tables. */
4243 set_hashtable_value_ids (void)
4245 vn_nary_op_iterator_type hin
;
4246 vn_phi_iterator_type hip
;
4247 vn_reference_iterator_type hir
;
4252 /* Now set the value ids of the things we had put in the hash
/* Walk all three valid-table hashes and stamp each entry's value_id
   from its result via set_value_id_for_result.  */
4255 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->nary
, vno
, vn_nary_op_t
, hin
)
4256 set_value_id_for_result (vno
->result
, &vno
->value_id
);
4258 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->phis
, vp
, vn_phi_t
, hip
)
4259 set_value_id_for_result (vp
->result
, &vp
->value_id
);
4261 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->references
, vr
, vn_reference_t
,
4263 set_value_id_for_result (vr
->result
, &vr
->value_id
);
/* Dominator walker that value-numbers block-ending control statements
   to discover non-executable outgoing edges; 'fail' (declaration not
   visible in this chunk) records bail-out.  */
4266 class cond_dom_walker
: public dom_walker
4269 cond_dom_walker () : dom_walker (CDI_DOMINATORS
), fail (false) {}
4271 virtual void before_dom_children (basic_block
);
/* Visit BB before its dominated children: decide its reachability from
   incoming EDGE_EXECUTABLE flags, then value-number the controlling
   statement to prune outgoing edges.  */
4277 cond_dom_walker::before_dom_children (basic_block bb
)
4285 /* If any of the predecessor edges that do not come from blocks dominated
4286 by us are still marked as possibly executable consider this block
4288 bool reachable
= bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
);
/* Edges from blocks we dominate are back edges; ignore them here.  */
4289 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4290 if (!dominated_by_p (CDI_DOMINATORS
, e
->src
, bb
))
4291 reachable
|= (e
->flags
& EDGE_EXECUTABLE
);
4293 /* If the block is not reachable all outgoing edges are not
4297 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4298 fprintf (dump_file
, "Marking all outgoing edges of unreachable "
4299 "BB %d as not executable\n", bb
->index
);
4301 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4302 e
->flags
&= ~EDGE_EXECUTABLE
;
4306 gimple stmt
= last_stmt (bb
);
/* Only conditional/switch/goto enders can prune edges here.  */
4310 enum gimple_code code
= gimple_code (stmt
);
4311 if (code
!= GIMPLE_COND
4312 && code
!= GIMPLE_SWITCH
4313 && code
!= GIMPLE_GOTO
)
4316 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4318 fprintf (dump_file
, "Value-numbering operands of stmt ending BB %d: ",
4320 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4323 /* Value-number the last stmts SSA uses. */
4326 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, i
, SSA_OP_USE
)
4327 if (VN_INFO (op
)->visited
== false
4334 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4335 if value-numbering can prove they are not reachable. Handling
4336 computed gotos is also possible. */
4342 tree lhs
= gimple_cond_lhs (stmt
);
4343 tree rhs
= gimple_cond_rhs (stmt
);
4344 /* Work hard in computing the condition and take into account
4345 the valueization of the defining stmt. */
4346 if (TREE_CODE (lhs
) == SSA_NAME
)
4347 lhs
= vn_get_expr_for (lhs
);
4348 if (TREE_CODE (rhs
) == SSA_NAME
)
4349 rhs
= vn_get_expr_for (rhs
);
/* Try folding the condition to a constant with the valueized
   operands.  */
4350 val
= fold_binary (gimple_cond_code (stmt
),
4351 boolean_type_node
, lhs
, rhs
);
4355 val
= gimple_switch_index (as_a
<gswitch
*> (stmt
));
4358 val
= gimple_goto_dest (stmt
);
/* If VAL determines a unique taken edge, all other succs are dead.  */
4366 edge taken
= find_taken_edge (bb
, vn_valueize (val
));
4370 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4371 fprintf (dump_file
, "Marking all edges out of BB %d but (%d -> %d) as "
4372 "not executable\n", bb
->index
, bb
->index
, taken
->dest
->index
);
4374 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4376 e
->flags
&= ~EDGE_EXECUTABLE
;
4379 /* Do SCCVN. Returns true if it finished, false if we bailed out
4380 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4381 how we use the alias oracle walking during the VN process. */
4384 run_scc_vn (vn_lookup_kind default_vn_walk_kind_
)
4390 default_vn_walk_kind
= default_vn_walk_kind_
;
4393 current_info
= valid_info
;
/* Parameter default defs value-number to themselves — they are inputs
   and can never be simplified further.  */
4395 for (param
= DECL_ARGUMENTS (current_function_decl
);
4397 param
= DECL_CHAIN (param
))
4399 tree def
= ssa_default_def (cfun
, param
);
4402 VN_INFO (def
)->visited
= true;
4403 VN_INFO (def
)->valnum
= def
;
4407 /* Mark all edges as possibly executable. */
4408 FOR_ALL_BB_FN (bb
, cfun
)
4412 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4413 e
->flags
|= EDGE_EXECUTABLE
;
4416 /* Walk all blocks in dominator order, value-numbering the last stmts
4417 SSA uses and decide whether outgoing edges are not executable. */
4418 cond_dom_walker walker
;
4419 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
4426 /* Value-number remaining SSA names. */
4427 for (i
= 1; i
< num_ssa_names
; ++i
)
4429 tree name
= ssa_name (i
);
4431 && VN_INFO (name
)->visited
== false
4432 && !has_zero_uses (name
))
4440 /* Initialize the value ids. */
/* Pass 1: names that are their own value (or still VN_TOP) get a fresh
   id; names valued to a constant share the constant's id.  */
4442 for (i
= 1; i
< num_ssa_names
; ++i
)
4444 tree name
= ssa_name (i
);
4448 info
= VN_INFO (name
);
4449 if (info
->valnum
== name
4450 || info
->valnum
== VN_TOP
)
4451 info
->value_id
= get_next_value_id ();
4452 else if (is_gimple_min_invariant (info
->valnum
))
4453 info
->value_id
= get_or_alloc_constant_value_id (info
->valnum
);
/* Pass 2: names valued to another SSA name inherit that name's id
   (two passes because the leader's id must exist first).  */
4457 for (i
= 1; i
< num_ssa_names
; ++i
)
4459 tree name
= ssa_name (i
);
4463 info
= VN_INFO (name
);
4464 if (TREE_CODE (info
->valnum
) == SSA_NAME
4465 && info
->valnum
!= name
4466 && info
->value_id
!= VN_INFO (info
->valnum
)->value_id
)
4467 info
->value_id
= VN_INFO (info
->valnum
)->value_id
;
4470 set_hashtable_value_ids ();
4472 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4474 fprintf (dump_file
, "Value numbers:\n");
4475 for (i
= 0; i
< num_ssa_names
; i
++)
4477 tree name
= ssa_name (i
);
/* Only dump names that actually got a different value.  */
4479 && VN_INFO (name
)->visited
4480 && SSA_VAL (name
) != name
)
4482 print_generic_expr (dump_file
, name
, 0);
4483 fprintf (dump_file
, " = ");
4484 print_generic_expr (dump_file
, SSA_VAL (name
), 0);
4485 fprintf (dump_file
, "\n");
4493 /* Return the maximum value id we have ever seen. */
4496 get_max_value_id (void)
/* next_value_id is the high-water mark; ids handed out are below it.  */
4498 return next_value_id
;
4501 /* Return the next unique value id. */
/* Post-increment: returns the current id and reserves it.  */
4504 get_next_value_id (void)
4506 return next_value_id
++;
4510 /* Compare two expressions E1 and E2 and return true if they are equal. */
4513 expressions_equal_p (tree e1
, tree e2
)
4515 /* The obvious case. */
4519 /* If only one of them is null, they cannot be equal. */
4523 /* Now perform the actual comparison. */
/* Codes must match before the structural operand_equal_p check;
   OEP_PURE_SAME treats calls to the same pure function as equal.  */
4524 if (TREE_CODE (e1
) == TREE_CODE (e2
)
4525 && operand_equal_p (e1
, e2
, OEP_PURE_SAME
))
4532 /* Return true if the nary operation NARY may trap. This is a copy
4533 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4536 vn_nary_may_trap (vn_nary_op_t nary
)
4539 tree rhs2
= NULL_TREE
;
4540 bool honor_nans
= false;
4541 bool honor_snans
= false;
4542 bool fp_operation
= false;
4543 bool honor_trapv
= false;
4547 if (TREE_CODE_CLASS (nary
->opcode
) == tcc_comparison
4548 || TREE_CODE_CLASS (nary
->opcode
) == tcc_unary
4549 || TREE_CODE_CLASS (nary
->opcode
) == tcc_binary
)
4552 fp_operation
= FLOAT_TYPE_P (type
);
4555 honor_nans
= flag_trapping_math
&& !flag_finite_math_only
;
4556 honor_snans
= flag_signaling_nans
!= 0;
4558 else if (INTEGRAL_TYPE_P (type
)
4559 && TYPE_OVERFLOW_TRAPS (type
))
4562 if (nary
->length
>= 2)
4564 ret
= operation_could_trap_helper_p (nary
->opcode
, fp_operation
,
4566 honor_nans
, honor_snans
, rhs2
,
4572 for (i
= 0; i
< nary
->length
; ++i
)
4573 if (tree_could_trap_p (nary
->op
[i
]))