/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "insn-config.h"
#include "alloc-pool.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
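
/* An illustrative sketch (not part of this file) of how alternative 2
   above iterates a non-trivial SCC: the members of a cycle are re-visited
   against freshly cleared optimistic tables until no value number changes.
   The helpers visit_use and clear_optimistic_tables stand in for the real
   workers defined later in this file and are hypothetical as written.  */
#if 0
static void
iterate_scc_sketch (vec<tree> scc)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Start each round with empty optimistic hashtables.  */
      clear_optimistic_tables ();
      for (unsigned i = 0; i < scc.length (); ++i)
	/* visit_use returns true if the value number of the SSA name
	   changed in this round.  */
	changed |= visit_use (scc[i]);
    }
}
#endif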
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  pool_allocator<vn_phi_s> *phis_pool;
  pool_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
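  /* For instance (illustrative, not from the original sources): with a
     MEM_REF offset of 8 and an address operand &s.b where field b lives
     at byte offset 4 in s, the pair is rewritten to a MEM_REF offset of
     12 with address operand &s.  */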
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && off == 0
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
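  /* For instance (illustrative): given 'static const int a[2] = { 1, 2 };',
     a load of a[1] reaching this point folds to the constant 2, either via
     fold_ctor_reference or the native encode/interpret path below.  */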
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
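  /* vn_reference_compute_hash adds SSA_NAME_VERSION of the vuse into the
     final hash value linearly, so the hash can be fixed up incrementally
     here instead of being recomputed from scratch.  */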
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
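  /* For instance (illustrative): after 'memset (&s, 0, sizeof (s))' a
     covered scalar read such as s.x can be value-numbered to zero.  */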
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
			{
			  if (TREE_CODE (TREE_TYPE (elt->value))
			      != VECTOR_TYPE)
			    val = elt->value;
			}
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      HOST_WIDE_INT extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1)
	{
	  if (lhs_ops[0].off == vr->operands[i].off)
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && vr->operands[i].off != -1)
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (extra_off != 0)
	{
	  if (rhs.length () < 2
	      || rhs[0].opcode != MEM_REF
	      || rhs[0].off == -1)
	    return (void *)-1;
	  rhs[0].off += extra_off;
	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
					build_int_cst (TREE_TYPE (rhs[0].op0),
						       extra_off));
	}

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
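  /* For instance (illustrative): a read of d[1] right after
     'memcpy (d, s, 16)' can be translated into a read of s[1].  */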
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = SSA_VAL (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = SSA_VAL (lhs);
	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += tree_to_uhwi (TREE_OPERAND (base, 1));
      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (lhs_offset >= at + maxsize / BITS_PER_UNIT
	  || lhs_offset + copy_size <= at)
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = current_info->references_pool->allocate ();
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                        INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = current_info->references_pool->allocate ();
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                        INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    std::swap (vno1->op[0], vno1->op[1]);

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
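
/* Illustrative, standalone sketch (editorial, not part of GCC): the
   operand swap above canonicalizes commutative operations before
   hashing, so "a + b" and "b + a" receive the same hash and value
   number.  The helper names below are hypothetical.  */
#if 0
#include <stdio.h>

/* Toy hash over an opcode and two already-valueized operand ids.  */
static unsigned
toy_nary_hash (int opcode, unsigned op0, unsigned op1)
{
  /* Order the pair deterministically, mirroring the std::swap above.  */
  if (op0 > op1)
    {
      unsigned tmp = op0;
      op0 = op1;
      op1 = tmp;
    }
  return (unsigned) opcode * 31u * 31u + op0 * 31u + op1;
}

int
main (void)
{
  /* A commutative opcode hashes identically for both operand orders.  */
  printf ("%u %u\n", toy_nary_hash (1, 7, 9), toy_nary_hash (1, 9, 7));
  return 0;
}
#endif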
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
                                                  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
                                                  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
                                  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
                        bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (vp1->block->index);
  int i;
  tree phi1op;
  tree type;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
        continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
        {
          tree phi2op = vp2->phiargs[i];
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
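
/* Illustrative, standalone sketch (editorial, not part of GCC): PHI
   equality treats VN_TOP ("not yet known") as compatible with
   anything, so phi(x, TOP) and phi(x, x) compare equal during
   optimistic iteration.  All names below are hypothetical.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

#define TOY_TOP 0   /* stand-in for VN_TOP */

static bool
toy_phi_args_eq (const int *args1, const int *args2, int nargs)
{
  for (int i = 0; i < nargs; i++)
    {
      if (args1[i] == TOY_TOP || args2[i] == TOY_TOP)
        continue;   /* TOP is equivalent to every value.  */
      if (args1[i] != args2[i])
        return false;
    }
  return true;
}

int
main (void)
{
  int a[2] = { 5, TOY_TOP };
  int b[2] = { 5, 5 };
  printf ("%d\n", toy_phi_args_eq (a, b, 2));  /* prints 1 */
  return 0;
}
#endif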
static vec<tree> shared_lookup_phiargs;
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  vn_phi_s **slot;
  struct vn_phi_s vp1;
  unsigned i;

  shared_lookup_phiargs.truncate (0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs.safe_push (def);
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
                                                  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
                                                  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = current_info->phis_pool->allocate ();
  unsigned i;
  vec<tree> args = vNULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args.safe_push (def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_assert (to != NULL_TREE
              && ((TREE_CODE (to) == SSA_NAME
                   && (to == from || SSA_VAL (to) == to))
                  || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from, 0);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to, 0);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && coff == toff))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      VN_INFO (def)->use_processed = true;
    }
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  /* And finally valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
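
/* Illustrative, standalone sketch (editorial, not part of GCC): the
   lookup-or-insert discipline of visit_nary_op above, if an
   equivalent expression is already in the table, reuse its value;
   otherwise record the current name as the expression's value.  All
   names below are hypothetical.  */
#if 0
#include <stdio.h>
#include <string.h>

struct toy_entry { char expr[16]; int value; };
static struct toy_entry table[64];
static int n_entries;

/* Return the value number for EXPR, assigning FRESH if unseen.  */
static int
toy_value_number (const char *expr, int fresh)
{
  for (int i = 0; i < n_entries; i++)
    if (strcmp (table[i].expr, expr) == 0)
      return table[i].value;            /* Redundant computation.  */
  snprintf (table[n_entries].expr, sizeof table[n_entries].expr, "%s", expr);
  table[n_entries++].value = fresh;
  return fresh;
}

int
main (void)
{
  printf ("%d\n", toy_value_number ("a+b", 1));  /* 1: first sight */
  printf ("%d\n", toy_value_number ("a+b", 2));  /* 1: reused */
  return 0;
}
#endif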
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        {
          changed |= set_ssa_val_to (lhs, vnresult->result);

          if (VN_INFO (vnresult->result)->has_constants)
            VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                            INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
                                       "vntemp");
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
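
/* Illustrative, standalone sketch (editorial, not part of GCC): value
   numbering loads by offset and size lets a read through one union
   member reuse the value stored through another, with the type
   mismatch patched up by a VIEW_CONVERT_EXPR-style reinterpretation.
   This is an example program exercising that pattern, not GCC code.  */
#if 0
#include <stdio.h>

union pun { unsigned int u; float f; };

int
main (void)
{
  union pun p;
  p.f = 1.0f;
  /* The load p.u reads the same bytes the store to p.f wrote; the
     access has matching offset and size, only the types differ.  */
  printf ("0x%08x\n", p.u);  /* prints 0x3f800000 on IEEE-754 targets */
  return 0;
}
#endif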
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
    }

  if ((!result || !resultsame)
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      && default_vn_walk_kind == VN_WALK)
    {
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
      if (vnresult)
        {
          VN_INFO (vdef)->use_processed = true;
          return set_ssa_val_to (vdef, vnresult->result_vdef);
        }
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
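
/* Illustrative, standalone sketch (editorial, not part of GCC): per
   the comment above, a store is redundant when the location already
   holds the value being stored under the incoming memory state; its
   output memory state (vdef) can then be numbered to its input state
   (vuse).  All names below are hypothetical.  */
#if 0
#include <stdio.h>

/* Return the memory-state number produced by a store of NEW_VAL when
   the location currently holds CUR_VAL_AT_ADDR under state MEM_IN.  */
static int
toy_store_memstate (int mem_in, int cur_val_at_addr, int new_val,
                    int next_state)
{
  if (cur_val_at_addr == new_val)
    return mem_in;      /* Redundant store: memory is unchanged.  */
  return next_state;    /* New store: fresh memory state.  */
}

int
main (void)
{
  /* Storing 42 where 42 already sits keeps memory state 1.  */
  printf ("%d\n", toy_store_memstate (1, 42, 42, 2));  /* prints 1 */
  /* Storing 7 there produces the fresh state 2 instead.  */
  printf ("%d\n", toy_store_memstate (1, 42, 7, 2));   /* prints 2 */
  return 0;
}
#endif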
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        if (TREE_CODE (def) == SSA_NAME)
          def = SSA_VAL (def);
        if (def == VN_TOP)
          continue;
        if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            allsame = false;
            break;
          }
      }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */

    case tcc_reference:
    case tcc_declaration:
      return false;

    default:
      return is_gimple_min_invariant (expr);
    }
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  tree tem;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_TERNARY_RHS:
      tem = gimple_assign_rhs3 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
        return true;
      /* Fallthru.  */

    case GIMPLE_BINARY_RHS:
      tem = gimple_assign_rhs2 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
        return true;
      /* Fallthru.  */

    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case GIMPLE_UNARY_RHS:
      tem = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      return is_gimple_min_invariant (tem);

    default:
      gcc_unreachable ();
    }
  return false;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME
      && (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (code) == tcc_comparison
          || code == COMPLEX_EXPR))
    op0 = vn_get_expr_for (op0);

  op1 = vn_valueize (op1);
  if (TREE_CODE (op1) == SSA_NAME
      && (VN_INFO (op1)->has_constants
          || code == COMPLEX_EXPR))
    op1 = vn_get_expr_for (op1);

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && tree_fits_uhwi_p (op1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
                                    TREE_OPERAND (op0, 0),
                                    tree_to_uhwi (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gassign *stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  orig_op0 = op0;
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
        op0 = vn_get_expr_for (op0);
      else if (CONVERT_EXPR_CODE_P (code)
               || code == REALPART_EXPR
               || code == IMAGPART_EXPR
               || code == VIEW_CONVERT_EXPR
               || code == BIT_FIELD_REF)
        {
          /* We want to do tree-combining on conversion-like expressions.
             Make sure we feed only SSA_NAMEs or constants to fold though.  */
          tree tem = vn_get_expr_for (op0);
          if (UNARY_CLASS_P (tem)
              || BINARY_CLASS_P (tem)
              || TREE_CODE (tem) == VIEW_CONVERT_EXPR
              || TREE_CODE (tem) == SSA_NAME
              || TREE_CODE (tem) == CONSTRUCTOR
              || is_gimple_min_invariant (tem))
            op0 = tem;
        }
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  if (code == BIT_FIELD_REF)
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
                             op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    }
  else
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Fallthrough for some unary codes that can operate on registers.  */
      if (!(code == REALPART_EXPR
            || code == IMAGPART_EXPR
            || code == VIEW_CONVERT_EXPR
            || code == BIT_FIELD_REF))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (gimple_has_volatile_ops (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          enum tree_code code = gimple_assign_rhs_code (stmt);
          tree lhs = gimple_assign_lhs (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (code == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, rhs1);
              goto done;
            }
          simplified = try_to_simplify (as_a <gassign *> (stmt));
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (rhs1))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (code == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs)
                   || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, rhs1, stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (rhs1))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, rhs1);
                }
              else
                {
                  /* First try to lookup the simplified expression.  */
                  if (simplified)
                    {
                      enum gimple_rhs_class rhs_class;

                      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
                      if ((rhs_class == GIMPLE_UNARY_RHS
                           || rhs_class == GIMPLE_BINARY_RHS
                           || rhs_class == GIMPLE_TERNARY_RHS)
                          && valid_gimple_rhs_p (simplified))
                        {
                          tree result = vn_nary_op_lookup (simplified, NULL);
                          if (result)
                            {
                              changed = set_ssa_val_to (lhs, result);
                              goto done;
                            }
                        }
                    }

                  /* Otherwise visit the original statement.  */
                  switch (vn_get_stmt_kind (stmt))
                    {
                    case VN_NARY:
                      changed = visit_nary_op (lhs, stmt);
                      break;
                    case VN_REFERENCE:
                      changed = visit_reference_op_load (lhs, rhs1, stmt);
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);
          if (lhs && TREE_CODE (lhs) == SSA_NAME)
            {
              /* Try constant folding based on our current lattice.  */
              tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
                                                                vn_valueize);
              if (simplified)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "call ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " simplified to ");
                      print_generic_expr (dump_file, simplified, 0);
                      if (TREE_CODE (lhs) == SSA_NAME)
                        fprintf (dump_file, " has constants %d\n",
                                 expr_has_constants (simplified));
                      else
                        fprintf (dump_file, "\n");
                    }
                }
              /* Setting value numbers to constants will occasionally
                 screw up phi congruence because constants are not
                 uniquely associated with a single ssa name that can be
                 looked up.  */
              if (simplified
                  && is_gimple_min_invariant (simplified))
                {
                  VN_INFO (lhs)->expr = simplified;
                  VN_INFO (lhs)->has_constants = true;
                  changed = set_ssa_val_to (lhs, simplified);
                  if (gimple_vdef (stmt))
                    changed |= set_ssa_val_to (gimple_vdef (stmt),
                                               SSA_VAL (gimple_vuse (stmt)));
                  goto done;
                }
              else if (simplified
                       && TREE_CODE (simplified) == SSA_NAME)
                {
                  changed = visit_copy (lhs, simplified);
                  if (gimple_vdef (stmt))
                    changed |= set_ssa_val_to (gimple_vdef (stmt),
                                               SSA_VAL (gimple_vuse (stmt)));
                  goto done;
                }
              else
                {
                  if (stmt_has_constants (stmt))
                    VN_INFO (lhs)->has_constants = true;
                  else
                    {
                      /* We reset expr and constantness here because we may
                         have been value numbering optimistically, and
                         iterating.  They may become non-constant in this case,
                         even if they were optimistically constant.  */
                      VN_INFO (lhs)->has_constants = false;
                      VN_INFO (lhs)->expr = NULL_TREE;
                    }

                  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
                    {
                      changed = defs_to_varying (stmt);
                      goto done;
                    }
                }
            }

          if (!gimple_call_internal_p (stmt)
              && (/* Calls to the same function with the same vuse
                     and the same operands do not necessarily return the same
                     value, unless they're pure or const.  */
                  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
                  /* If calls have a vdef, subsequent calls won't have
                     the same incoming vuse.  So, if 2 calls with vdef have the
                     same vuse, we know they're not subsequent.
                     We can value number 2 calls to the same function with the
                     same vuse and the same operands which are not subsequent
                     the same, because there is no code in the program that can
                     compare the 2 values...  */
                  || (gimple_vdef (stmt)
                      /* ... unless the call returns a pointer which does
                         not alias with anything else.  In which case the
                         information that the values are distinct are encoded
                         in the IL.  */
                      && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                      /* Only perform the following when being called from PRE
                         which embeds tail merging.  */
                      && default_vn_walk_kind == VN_WALK)))
            changed = visit_reference_op_call (lhs, call_stmt);
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
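
/* Illustrative, standalone sketch (editorial, not part of GCC):
   sorting SCC members by a precomputed RPO index with a qsort
   comparator, so straight-line members are visited in execution
   order.  All names below are hypothetical.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static const int toy_rpo[] = { 2, 0, 1 };  /* name i defined at RPO toy_rpo[i] */

static int
toy_compare_by_rpo (const void *pa, const void *pb)
{
  int a = *(const int *) pa;
  int b = *(const int *) pb;
  return toy_rpo[a] - toy_rpo[b];
}

int
main (void)
{
  int scc[3] = { 0, 1, 2 };
  qsort (scc, 3, sizeof (int), toy_compare_by_rpo);
  printf ("%d %d %d\n", scc[0], scc[1], scc[2]);  /* prints "1 2 0" */
  return 0;
}
#endif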
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
        changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
                               ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
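
/* Illustrative, standalone sketch (editorial, not part of GCC): the
   optimistic SCC processing above is a classic fixpoint loop, revisit
   every member until no value number changes.  Termination follows
   from the lattice only moving downward (see set_ssa_val_to).  All
   names below are hypothetical.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

/* One toy "visit": meet each member with its cycle predecessor;
   min() is a monotone meet, so the loop must converge.  */
static bool
toy_visit (int *val, int n, int i)
{
  int pred = val[(i + n - 1) % n];
  if (pred < val[i])
    {
      val[i] = pred;
      return true;
    }
  return false;
}

int
main (void)
{
  int val[3] = { 7, 7, 9 };
  bool changed = true;
  int iterations = 0;
  while (changed)
    {
      changed = false;
      iterations++;
      for (int i = 0; i < 3; i++)
        changed |= toy_visit (val, 3, i);
    }
  printf ("converged after %d iterations\n", iterations);  /* 2 */
  return 0;
}
#endif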
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    }
  while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", scc.length (),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                namevec.release ();
                itervec.release ();
                return false;
              }

          /* Check if we are done.  */
          if (namevec.is_empty ())
            {
              namevec.release ();
              itervec.release ();
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = namevec.pop ();
          memcpy (&iter, &itervec.last (),
                  sizeof (ssa_op_iter));
          itervec.pop ();
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              itervec.safe_push (iter);
              namevec.safe_push (name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
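
/* Illustrative, standalone sketch (editorial, not part of GCC): the
   iterative walk above is Tarjan's SCC algorithm; the recursive
   textbook form below shows the same dfsnum/low/stack bookkeeping on
   a small adjacency-matrix graph.  All names are hypothetical.  */
#if 0
#include <stdio.h>

#define N 4
static int adj[N][N] = { {0,1,0,0}, {0,0,1,0}, {1,0,0,1}, {0,0,0,0} };
static int dfsnum[N], low[N], onstack[N], visited[N];
static int stack[N], sp, next_dfs;

static void
tarjan (int v)
{
  dfsnum[v] = low[v] = next_dfs++;
  visited[v] = 1;
  stack[sp++] = v;
  onstack[v] = 1;
  for (int w = 0; w < N; w++)
    if (adj[v][w])
      {
        if (!visited[w])
          {
            tarjan (w);
            if (low[w] < low[v])
              low[v] = low[w];
          }
        else if (onstack[w] && dfsnum[w] < low[v])
          low[v] = dfsnum[w];
      }
  if (low[v] == dfsnum[v])      /* v is the root of an SCC.  */
    {
      int w;
      printf ("SCC:");
      do
        {
          w = stack[--sp];
          onstack[w] = 0;
          printf (" %d", w);
        }
      while (w != v);
      printf ("\n");
    }
}

int
main (void)
{
  /* SCCs pop in topological order: "SCC: 3" then "SCC: 2 1 0".  */
  for (int v = 0; v < N; v++)
    if (!visited[v])
      tarjan (v);
  return 0;
}
#endif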
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
  table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
                                                               30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
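
/* Illustrative, standalone sketch (editorial, not part of GCC): the
   rpo_numbers setup above inverts a permutation, from "RPO position
   -> block" to "block -> RPO position", in one pass.  All names are
   hypothetical.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int order[4] = { 2, 0, 3, 1 };        /* i'th block in RPO is order[i] */
  int position[4];
  for (int j = 0; j < 4; j++)
    position[order[j]] = j;             /* block b sits at RPO position[b] */
  for (int b = 0; b < 4; b++)
    printf ("block %d -> rpo %d\n", b, position[b]);
  return 0;
}
#endif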
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};

void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Marking all outgoing edges of unreachable "
                 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
        && !DFS (op))
      {
        fail = true;
        return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        /* Work hard in computing the condition and take into account
           the valueization of the defining stmt.  */
        if (TREE_CODE (lhs) == SSA_NAME)
          lhs = vn_get_expr_for (lhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          rhs = vn_get_expr_for (rhs);
        val = fold_binary (gimple_cond_code (stmt),
                           boolean_type_node, lhs, rhs);
        break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
             "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        {
          VN_INFO (def)->visited = true;
          VN_INFO (def)->valnum = def;
        }
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last stmt's
     SSA uses and deciding whether outgoing edges are not executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))