/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "insn-config.h"
#include "alloc-pool.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it does not change).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
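/* The comment above describes the two iteration strategies only in prose.
   The block below is a deliberately tiny, self-contained sketch (not GCC
   code; the toy_* names and the simplified PHI meet rule are illustrative
   assumptions) of the optimistic fixpoint idea behind alternative 2: names
   in a cycle start at the top of the lattice and are re-evaluated until
   their value numbers stop changing.  It is guarded with #if 0 so it does
   not affect this translation unit; compiled on its own it reports that all
   four toy names share value number 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

#define N_NAMES 4
#define VN_TOP_VAL (-1)	/* Toy stand-in for the lattice top (VN_TOP).  */

/* Toy SSA graph: kind 0 = external input, kind 1 = copy of arg0,
   kind 2 = PHI <arg0, arg1>.  Names _2 and _3 form a cycle.  */
struct toy_def { int kind, arg0, arg1; };

static const struct toy_def def[N_NAMES] = {
  { 0, 0, 0 },   /* _0: external input.   */
  { 1, 0, 0 },   /* _1 = _0                */
  { 2, 3, 1 },   /* _2 = PHI <_3, _1>      */
  { 1, 2, 0 },   /* _3 = _2                */
};

int
main (void)
{
  int valnum[N_NAMES];

  /* Optimistic start: every name in the cycle is the lattice top.  */
  for (int i = 0; i < N_NAMES; i++)
    valnum[i] = VN_TOP_VAL;
  valnum[0] = 0;

  bool changed = true;
  while (changed)   /* Iterate the cycle until a fixpoint is reached.  */
    {
      changed = false;
      for (int i = 1; i < N_NAMES; i++)
        {
          int v = valnum[i];
          if (def[i].kind == 1)
            v = valnum[def[i].arg0];
          else if (def[i].kind == 2)
            {
              int a = valnum[def[i].arg0], b = valnum[def[i].arg1];
              /* A PHI whose non-top arguments agree gets that value;
                 otherwise it defines a new value of its own.  */
              if (a == VN_TOP_VAL)
                v = b;
              else if (b == VN_TOP_VAL || a == b)
                v = a;
              else
                v = i;
            }
          if (v != valnum[i])
            {
              valnum[i] = v;
              changed = true;
            }
        }
    }

  for (int i = 0; i < N_NAMES; i++)
    printf ("_%d has value number %d\n", i, valnum[i]);
  return 0;
}
#endif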
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple *def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
610 /* Compute a hash for the reference operation VR1 and return it. */
613 vn_reference_compute_hash (const vn_reference_t vr1
)
615 inchash::hash hstate
;
618 vn_reference_op_t vro
;
619 HOST_WIDE_INT off
= -1;
622 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
624 if (vro
->opcode
== MEM_REF
)
626 else if (vro
->opcode
!= ADDR_EXPR
)
638 hstate
.add_int (off
);
641 && vro
->opcode
== ADDR_EXPR
)
645 tree op
= TREE_OPERAND (vro
->op0
, 0);
646 hstate
.add_int (TREE_CODE (op
));
647 inchash::add_expr (op
, hstate
);
651 vn_reference_op_compute_hash (vro
, hstate
);
654 result
= hstate
.end ();
655 /* ??? We would ICE later if we hash instead of adding that in. */
657 result
+= SSA_NAME_VERSION (vr1
->vuse
);
662 /* Return true if reference operations VR1 and VR2 are equivalent. This
663 means they have the same set of operands and vuses. */
666 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
670 /* Early out if this is not a hash collision. */
671 if (vr1
->hashcode
!= vr2
->hashcode
)
674 /* The VOP needs to be the same. */
675 if (vr1
->vuse
!= vr2
->vuse
)
678 /* If the operands are the same we are done. */
679 if (vr1
->operands
== vr2
->operands
)
682 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
685 if (INTEGRAL_TYPE_P (vr1
->type
)
686 && INTEGRAL_TYPE_P (vr2
->type
))
688 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
691 else if (INTEGRAL_TYPE_P (vr1
->type
)
692 && (TYPE_PRECISION (vr1
->type
)
693 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
695 else if (INTEGRAL_TYPE_P (vr2
->type
)
696 && (TYPE_PRECISION (vr2
->type
)
697 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
704 HOST_WIDE_INT off1
= 0, off2
= 0;
705 vn_reference_op_t vro1
, vro2
;
706 vn_reference_op_s tem1
, tem2
;
707 bool deref1
= false, deref2
= false;
708 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
710 if (vro1
->opcode
== MEM_REF
)
716 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
718 if (vro2
->opcode
== MEM_REF
)
726 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
728 memset (&tem1
, 0, sizeof (tem1
));
729 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
730 tem1
.type
= TREE_TYPE (tem1
.op0
);
731 tem1
.opcode
= TREE_CODE (tem1
.op0
);
735 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
737 memset (&tem2
, 0, sizeof (tem2
));
738 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
739 tem2
.type
= TREE_TYPE (tem2
.op0
);
740 tem2
.opcode
= TREE_CODE (tem2
.op0
);
744 if (deref1
!= deref2
)
746 if (!vn_reference_op_eq (vro1
, vro2
))
751 while (vr1
->operands
.length () != i
752 || vr2
->operands
.length () != j
);
757 /* Copy the operations present in load/store REF into RESULT, a vector of
758 vn_reference_op_s's. */
761 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
763 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
765 vn_reference_op_s temp
;
769 memset (&temp
, 0, sizeof (temp
));
770 temp
.type
= TREE_TYPE (ref
);
771 temp
.opcode
= TREE_CODE (ref
);
772 temp
.op0
= TMR_INDEX (ref
);
773 temp
.op1
= TMR_STEP (ref
);
774 temp
.op2
= TMR_OFFSET (ref
);
776 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
777 temp
.base
= MR_DEPENDENCE_BASE (ref
);
778 result
->quick_push (temp
);
780 memset (&temp
, 0, sizeof (temp
));
781 temp
.type
= NULL_TREE
;
782 temp
.opcode
= ERROR_MARK
;
783 temp
.op0
= TMR_INDEX2 (ref
);
785 result
->quick_push (temp
);
787 memset (&temp
, 0, sizeof (temp
));
788 temp
.type
= NULL_TREE
;
789 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
790 temp
.op0
= TMR_BASE (ref
);
792 result
->quick_push (temp
);
796 /* For non-calls, store the information that makes up the address. */
800 vn_reference_op_s temp
;
802 memset (&temp
, 0, sizeof (temp
));
803 temp
.type
= TREE_TYPE (ref
);
804 temp
.opcode
= TREE_CODE (ref
);
810 temp
.op0
= TREE_OPERAND (ref
, 1);
813 temp
.op0
= TREE_OPERAND (ref
, 1);
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp
.op0
= TREE_OPERAND (ref
, 1);
819 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 1)))
820 temp
.off
= tree_to_shwi (TREE_OPERAND (ref
, 1));
821 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
822 temp
.base
= MR_DEPENDENCE_BASE (ref
);
825 /* Record bits and position. */
826 temp
.op0
= TREE_OPERAND (ref
, 1);
827 temp
.op1
= TREE_OPERAND (ref
, 2);
828 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 2)))
830 HOST_WIDE_INT off
= tree_to_shwi (TREE_OPERAND (ref
, 2));
831 if (off
% BITS_PER_UNIT
== 0)
832 temp
.off
= off
/ BITS_PER_UNIT
;
836 /* The field decl is enough to unambiguously specify the field,
837 a matching type is not necessary and a mismatching type
838 is always a spurious difference. */
839 temp
.type
= NULL_TREE
;
840 temp
.op0
= TREE_OPERAND (ref
, 1);
841 temp
.op1
= TREE_OPERAND (ref
, 2);
843 tree this_offset
= component_ref_field_offset (ref
);
845 && TREE_CODE (this_offset
) == INTEGER_CST
)
847 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
848 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
851 = (wi::to_offset (this_offset
)
852 + wi::lrshift (wi::to_offset (bit_offset
),
853 LOG2_BITS_PER_UNIT
));
854 if (wi::fits_shwi_p (off
)
/* Prohibit value-numbering zero offset components
856 of addresses the same before the pass folding
857 __builtin_object_size had a chance to run
858 (checking cfun->after_inlining does the
860 && (TREE_CODE (orig
) != ADDR_EXPR
862 || cfun
->after_inlining
))
863 temp
.off
= off
.to_shwi ();
868 case ARRAY_RANGE_REF
:
870 /* Record index as operand. */
871 temp
.op0
= TREE_OPERAND (ref
, 1);
872 /* Always record lower bounds and element size. */
873 temp
.op1
= array_ref_low_bound (ref
);
874 temp
.op2
= array_ref_element_size (ref
);
875 if (TREE_CODE (temp
.op0
) == INTEGER_CST
876 && TREE_CODE (temp
.op1
) == INTEGER_CST
877 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
879 offset_int off
= ((wi::to_offset (temp
.op0
)
880 - wi::to_offset (temp
.op1
))
881 * wi::to_offset (temp
.op2
));
882 if (wi::fits_shwi_p (off
))
883 temp
.off
= off
.to_shwi();
887 if (DECL_HARD_REGISTER (ref
))
896 /* Canonicalize decls to MEM[&decl] which is what we end up with
897 when valueizing MEM[ptr] with ptr = &decl. */
898 temp
.opcode
= MEM_REF
;
899 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
901 result
->safe_push (temp
);
902 temp
.opcode
= ADDR_EXPR
;
903 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
904 temp
.type
= TREE_TYPE (temp
.op0
);
918 if (is_gimple_min_invariant (ref
))
924 /* These are only interesting for their operands, their
925 existence, and their type. They will never be the last
926 ref in the chain of references (IE they require an
927 operand), so we don't have to put anything
928 for op* as it will be handled by the iteration */
930 case VIEW_CONVERT_EXPR
:
934 /* This is only interesting for its constant offset. */
935 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
940 result
->safe_push (temp
);
942 if (REFERENCE_CLASS_P (ref
)
943 || TREE_CODE (ref
) == MODIFY_EXPR
944 || TREE_CODE (ref
) == WITH_SIZE_EXPR
945 || (TREE_CODE (ref
) == ADDR_EXPR
946 && !is_gimple_min_invariant (ref
)))
947 ref
= TREE_OPERAND (ref
, 0);
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
954 operands in *OPS, the reference alias set SET and the reference type TYPE.
955 Return true if something useful was produced. */
958 ao_ref_init_from_vn_reference (ao_ref
*ref
,
959 alias_set_type set
, tree type
,
960 vec
<vn_reference_op_s
> ops
)
962 vn_reference_op_t op
;
964 tree base
= NULL_TREE
;
966 offset_int offset
= 0;
968 offset_int size
= -1;
969 tree size_tree
= NULL_TREE
;
970 alias_set_type base_alias_set
= -1;
972 /* First get the final access size from just the outermost expression. */
974 if (op
->opcode
== COMPONENT_REF
)
975 size_tree
= DECL_SIZE (op
->op0
);
976 else if (op
->opcode
== BIT_FIELD_REF
)
980 machine_mode mode
= TYPE_MODE (type
);
982 size_tree
= TYPE_SIZE (type
);
984 size
= int (GET_MODE_BITSIZE (mode
));
986 if (size_tree
!= NULL_TREE
987 && TREE_CODE (size_tree
) == INTEGER_CST
)
988 size
= wi::to_offset (size_tree
);
990 /* Initially, maxsize is the same as the accessed element size.
991 In the following it will only grow (or become -1). */
994 /* Compute cumulative bit-offset for nested component-refs and array-refs,
995 and find the ultimate containing object. */
996 FOR_EACH_VEC_ELT (ops
, i
, op
)
1000 /* These may be in the reference ops, but we cannot do anything
1001 sensible with them here. */
1003 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1004 if (base
!= NULL_TREE
1005 && TREE_CODE (base
) == MEM_REF
1007 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
1009 vn_reference_op_t pop
= &ops
[i
-1];
1010 base
= TREE_OPERAND (op
->op0
, 0);
1017 offset
+= pop
->off
* BITS_PER_UNIT
;
1025 /* Record the base objects. */
1027 base_alias_set
= get_deref_alias_set (op
->op0
);
1028 *op0_p
= build2 (MEM_REF
, op
->type
,
1029 NULL_TREE
, op
->op0
);
1030 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
1031 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
1032 op0_p
= &TREE_OPERAND (*op0_p
, 0);
1043 /* And now the usual component-reference style ops. */
1045 offset
+= wi::to_offset (op
->op1
);
1050 tree field
= op
->op0
;
1051 /* We do not have a complete COMPONENT_REF tree here so we
1052 cannot use component_ref_field_offset. Do the interesting
1054 tree this_offset
= DECL_FIELD_OFFSET (field
);
1056 if (op
->op1
|| TREE_CODE (this_offset
) != INTEGER_CST
)
1060 offset_int woffset
= wi::lshift (wi::to_offset (this_offset
),
1061 LOG2_BITS_PER_UNIT
);
1062 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1068 case ARRAY_RANGE_REF
:
1070 /* We recorded the lower bound and the element size. */
1071 if (TREE_CODE (op
->op0
) != INTEGER_CST
1072 || TREE_CODE (op
->op1
) != INTEGER_CST
1073 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1078 = wi::sext (wi::to_offset (op
->op0
) - wi::to_offset (op
->op1
),
1079 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1080 woffset
*= wi::to_offset (op
->op2
);
1081 woffset
= wi::lshift (woffset
, LOG2_BITS_PER_UNIT
);
1093 case VIEW_CONVERT_EXPR
:
1110 if (base
== NULL_TREE
)
1113 ref
->ref
= NULL_TREE
;
1115 ref
->ref_alias_set
= set
;
1116 if (base_alias_set
!= -1)
1117 ref
->base_alias_set
= base_alias_set
;
1119 ref
->base_alias_set
= get_alias_set (base
);
1120 /* We discount volatiles from value-numbering elsewhere. */
1121 ref
->volatile_p
= false;
1123 if (!wi::fits_shwi_p (size
) || wi::neg_p (size
))
1131 ref
->size
= size
.to_shwi ();
1133 if (!wi::fits_shwi_p (offset
))
1140 ref
->offset
= offset
.to_shwi ();
1142 if (!wi::fits_shwi_p (max_size
) || wi::neg_p (max_size
))
1145 ref
->max_size
= max_size
.to_shwi ();
1150 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1151 vn_reference_op_s's. */
1154 copy_reference_ops_from_call (gcall
*call
,
1155 vec
<vn_reference_op_s
> *result
)
1157 vn_reference_op_s temp
;
1159 tree lhs
= gimple_call_lhs (call
);
1162 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1163 different. By adding the lhs here in the vector, we ensure that the
1164 hashcode is different, guaranteeing a different value number. */
1165 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1167 memset (&temp
, 0, sizeof (temp
));
1168 temp
.opcode
= MODIFY_EXPR
;
1169 temp
.type
= TREE_TYPE (lhs
);
1172 result
->safe_push (temp
);
1175 /* Copy the type, opcode, function, static chain and EH region, if any. */
1176 memset (&temp
, 0, sizeof (temp
));
1177 temp
.type
= gimple_call_return_type (call
);
1178 temp
.opcode
= CALL_EXPR
;
1179 temp
.op0
= gimple_call_fn (call
);
1180 temp
.op1
= gimple_call_chain (call
);
1181 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1182 temp
.op2
= size_int (lr
);
1184 if (gimple_call_with_bounds_p (call
))
1185 temp
.with_bounds
= 1;
1186 result
->safe_push (temp
);
1188 /* Copy the call arguments. As they can be references as well,
1189 just chain them together. */
1190 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1192 tree callarg
= gimple_call_arg (call
, i
);
1193 copy_reference_ops_from_ref (callarg
, result
);
1197 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1198 *I_P to point to the last element of the replacement. */
1200 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1203 unsigned int i
= *i_p
;
1204 vn_reference_op_t op
= &(*ops
)[i
];
1205 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1207 HOST_WIDE_INT addr_offset
= 0;
1209 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1210 from .foo.bar to the preceding MEM_REF offset and replace the
1211 address with &OBJ. */
1212 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1214 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1215 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1217 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1219 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1220 op
->op0
= build_fold_addr_expr (addr_base
);
1221 if (tree_fits_shwi_p (mem_op
->op0
))
1222 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1230 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1231 *I_P to point to the last element of the replacement. */
1233 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1236 unsigned int i
= *i_p
;
1237 vn_reference_op_t op
= &(*ops
)[i
];
1238 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1240 enum tree_code code
;
1243 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1244 if (!is_gimple_assign (def_stmt
))
1247 code
= gimple_assign_rhs_code (def_stmt
);
1248 if (code
!= ADDR_EXPR
1249 && code
!= POINTER_PLUS_EXPR
)
1252 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1254 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1255 from .foo.bar to the preceding MEM_REF offset and replace the
1256 address with &OBJ. */
1257 if (code
== ADDR_EXPR
)
1259 tree addr
, addr_base
;
1260 HOST_WIDE_INT addr_offset
;
1262 addr
= gimple_assign_rhs1 (def_stmt
);
1263 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1265 /* If that didn't work because the address isn't invariant propagate
1266 the reference tree from the address operation in case the current
1267 dereference isn't offsetted. */
1269 && *i_p
== ops
->length () - 1
1271 /* This makes us disable this transform for PRE where the
1272 reference ops might be also used for code insertion which
1274 && default_vn_walk_kind
== VN_WALKREWRITE
)
1276 auto_vec
<vn_reference_op_s
, 32> tem
;
1277 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1280 ops
->safe_splice (tem
);
1285 || TREE_CODE (addr_base
) != MEM_REF
)
1289 off
+= mem_ref_offset (addr_base
);
1290 op
->op0
= TREE_OPERAND (addr_base
, 0);
1295 ptr
= gimple_assign_rhs1 (def_stmt
);
1296 ptroff
= gimple_assign_rhs2 (def_stmt
);
1297 if (TREE_CODE (ptr
) != SSA_NAME
1298 || TREE_CODE (ptroff
) != INTEGER_CST
)
1301 off
+= wi::to_offset (ptroff
);
1305 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1306 if (tree_fits_shwi_p (mem_op
->op0
))
1307 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1310 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1311 op
->op0
= SSA_VAL (op
->op0
);
1312 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1313 op
->opcode
= TREE_CODE (op
->op0
);
1316 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1317 vn_reference_maybe_forwprop_address (ops
, i_p
);
1318 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1319 vn_reference_fold_indirect (ops
, i_p
);
1323 /* Optimize the reference REF to a constant if possible or return
1324 NULL_TREE if not. */
1327 fully_constant_vn_reference_p (vn_reference_t ref
)
1329 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1330 vn_reference_op_t op
;
1332 /* Try to simplify the translated expression if it is
1333 a call to a builtin function with at most two arguments. */
1335 if (op
->opcode
== CALL_EXPR
1336 && TREE_CODE (op
->op0
) == ADDR_EXPR
1337 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1338 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1339 && operands
.length () >= 2
1340 && operands
.length () <= 3)
1342 vn_reference_op_t arg0
, arg1
= NULL
;
1343 bool anyconst
= false;
1344 arg0
= &operands
[1];
1345 if (operands
.length () > 2)
1346 arg1
= &operands
[2];
1347 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1348 || (arg0
->opcode
== ADDR_EXPR
1349 && is_gimple_min_invariant (arg0
->op0
)))
1352 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1353 || (arg1
->opcode
== ADDR_EXPR
1354 && is_gimple_min_invariant (arg1
->op0
))))
1358 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1361 arg1
? arg1
->op0
: NULL
);
1363 && TREE_CODE (folded
) == NOP_EXPR
)
1364 folded
= TREE_OPERAND (folded
, 0);
1366 && is_gimple_min_invariant (folded
))
1371 /* Simplify reads from constants or constant initializers. */
1372 else if (BITS_PER_UNIT
== 8
1373 && is_gimple_reg_type (ref
->type
)
1374 && (!INTEGRAL_TYPE_P (ref
->type
)
1375 || TYPE_PRECISION (ref
->type
) % BITS_PER_UNIT
== 0))
1377 HOST_WIDE_INT off
= 0;
1379 if (INTEGRAL_TYPE_P (ref
->type
))
1380 size
= TYPE_PRECISION (ref
->type
);
1382 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1383 if (size
% BITS_PER_UNIT
!= 0
1384 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1386 size
/= BITS_PER_UNIT
;
1388 for (i
= 0; i
< operands
.length (); ++i
)
1390 if (operands
[i
].off
== -1)
1392 off
+= operands
[i
].off
;
1393 if (operands
[i
].opcode
== MEM_REF
)
1399 vn_reference_op_t base
= &operands
[--i
];
1400 tree ctor
= error_mark_node
;
1401 tree decl
= NULL_TREE
;
1402 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1404 else if (base
->opcode
== MEM_REF
1405 && base
[1].opcode
== ADDR_EXPR
1406 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1407 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
))
1409 decl
= TREE_OPERAND (base
[1].op0
, 0);
1410 ctor
= ctor_for_folding (decl
);
1412 if (ctor
== NULL_TREE
)
1413 return build_zero_cst (ref
->type
);
1414 else if (ctor
!= error_mark_node
)
1418 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1419 off
* BITS_PER_UNIT
,
1420 size
* BITS_PER_UNIT
, decl
);
1423 STRIP_USELESS_TYPE_CONVERSION (res
);
1424 if (is_gimple_min_invariant (res
))
1430 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1431 if (native_encode_expr (ctor
, buf
, size
, off
) > 0)
1432 return native_interpret_expr (ref
->type
, buf
, size
);
1440 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1441 structures into their value numbers. This is done in-place, and
1442 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1443 whether any operands were valueized. */
1445 static vec
<vn_reference_op_s
>
1446 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1448 vn_reference_op_t vro
;
1451 *valueized_anything
= false;
1453 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1455 if (vro
->opcode
== SSA_NAME
1456 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1458 tree tem
= SSA_VAL (vro
->op0
);
1459 if (tem
!= vro
->op0
)
1461 *valueized_anything
= true;
1464 /* If it transforms from an SSA_NAME to a constant, update
1466 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1467 vro
->opcode
= TREE_CODE (vro
->op0
);
1469 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1471 tree tem
= SSA_VAL (vro
->op1
);
1472 if (tem
!= vro
->op1
)
1474 *valueized_anything
= true;
1478 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1480 tree tem
= SSA_VAL (vro
->op2
);
1481 if (tem
!= vro
->op2
)
1483 *valueized_anything
= true;
1487 /* If it transforms from an SSA_NAME to an address, fold with
1488 a preceding indirect reference. */
1491 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1492 && orig
[i
- 1].opcode
== MEM_REF
)
1494 if (vn_reference_fold_indirect (&orig
, &i
))
1495 *valueized_anything
= true;
1498 && vro
->opcode
== SSA_NAME
1499 && orig
[i
- 1].opcode
== MEM_REF
)
1501 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1502 *valueized_anything
= true;
1504 /* If it transforms a non-constant ARRAY_REF into a constant
1505 one, adjust the constant offset. */
1506 else if (vro
->opcode
== ARRAY_REF
1508 && TREE_CODE (vro
->op0
) == INTEGER_CST
1509 && TREE_CODE (vro
->op1
) == INTEGER_CST
1510 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1512 offset_int off
= ((wi::to_offset (vro
->op0
)
1513 - wi::to_offset (vro
->op1
))
1514 * wi::to_offset (vro
->op2
));
1515 if (wi::fits_shwi_p (off
))
1516 vro
->off
= off
.to_shwi ();
1523 static vec
<vn_reference_op_s
>
1524 valueize_refs (vec
<vn_reference_op_s
> orig
)
1527 return valueize_refs_1 (orig
, &tem
);
1530 static vec
<vn_reference_op_s
> shared_lookup_references
;
1532 /* Create a vector of vn_reference_op_s structures from REF, a
1533 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1534 this function. *VALUEIZED_ANYTHING will specify whether any
1535 operands were valueized. */
1537 static vec
<vn_reference_op_s
>
1538 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1542 shared_lookup_references
.truncate (0);
1543 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1544 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1545 valueized_anything
);
1546 return shared_lookup_references
;
1549 /* Create a vector of vn_reference_op_s structures from CALL, a
1550 call statement. The vector is shared among all callers of
1553 static vec
<vn_reference_op_s
>
1554 valueize_shared_reference_ops_from_call (gcall
*call
)
1558 shared_lookup_references
.truncate (0);
1559 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1560 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1561 return shared_lookup_references
;
1564 /* Lookup a SCCVN reference operation VR in the current hash table.
1565 Returns the resulting value number if it exists in the hash table,
1566 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1567 vn_reference_t stored in the hashtable if something is found. */
1570 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1572 vn_reference_s
**slot
;
1575 hash
= vr
->hashcode
;
1576 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1577 if (!slot
&& current_info
== optimistic_info
)
1578 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1582 *vnresult
= (vn_reference_t
)*slot
;
1583 return ((vn_reference_t
)*slot
)->result
;
1589 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1590 with the current VUSE and performs the expression lookup. */
1593 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1594 unsigned int cnt
, void *vr_
)
1596 vn_reference_t vr
= (vn_reference_t
)vr_
;
1597 vn_reference_s
**slot
;
1600 /* This bounds the stmt walks we perform on reference lookups
1601 to O(1) instead of O(N) where N is the number of dominating
1603 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1607 *last_vuse_ptr
= vuse
;
1609 /* Fixup vuse and hash. */
1611 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1612 vr
->vuse
= vuse_ssa_val (vuse
);
1614 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1616 hash
= vr
->hashcode
;
1617 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1618 if (!slot
&& current_info
== optimistic_info
)
1619 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1626 /* Lookup an existing or insert a new vn_reference entry into the
1627 value table for the VUSE, SET, TYPE, OPERANDS reference which
1628 has the value VALUE which is either a constant or an SSA name. */
1630 static vn_reference_t
1631 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1634 vec
<vn_reference_op_s
,
1639 vn_reference_t result
;
1642 vr1
.operands
= operands
;
1645 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1646 if (vn_reference_lookup_1 (&vr1
, &result
))
1648 if (TREE_CODE (value
) == SSA_NAME
)
1649 value_id
= VN_INFO (value
)->value_id
;
1651 value_id
= get_or_alloc_constant_value_id (value
);
1652 return vn_reference_insert_pieces (vuse
, set
, type
,
1653 operands
.copy (), value
, value_id
);
1656 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1657 from the statement defining VUSE and if not successful tries to
1658 translate *REFP and VR_ through an aggregate copy at the definition
1662 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1663 bool disambiguate_only
)
1665 vn_reference_t vr
= (vn_reference_t
)vr_
;
1666 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1668 HOST_WIDE_INT offset
, maxsize
;
1669 static vec
<vn_reference_op_s
>
1672 bool lhs_ref_ok
= false;
1674 /* First try to disambiguate after value-replacing in the definitions LHS. */
1675 if (is_gimple_assign (def_stmt
))
1677 tree lhs
= gimple_assign_lhs (def_stmt
);
1678 bool valueized_anything
= false;
1679 /* Avoid re-allocation overhead. */
1680 lhs_ops
.truncate (0);
1681 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1682 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1683 if (valueized_anything
)
1685 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1686 get_alias_set (lhs
),
1687 TREE_TYPE (lhs
), lhs_ops
);
1689 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1694 ao_ref_init (&lhs_ref
, lhs
);
1698 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1699 && gimple_call_num_args (def_stmt
) <= 4)
1701 /* For builtin calls valueize its arguments and call the
1702 alias oracle again. Valueization may improve points-to
1703 info of pointers and constify size and position arguments.
1704 Originally this was motivated by PR61034 which has
1705 conditional calls to free falsely clobbering ref because
1706 of imprecise points-to info of the argument. */
1708 bool valueized_anything
= false;
1709 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1711 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1712 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1713 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1715 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1716 valueized_anything
= true;
1719 if (valueized_anything
)
1721 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1723 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1724 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1730 if (disambiguate_only
)
1733 base
= ao_ref_base (ref
);
1734 offset
= ref
->offset
;
1735 maxsize
= ref
->max_size
;
1737 /* If we cannot constrain the size of the reference we cannot
1738 test if anything kills it. */
1742 /* We can't deduce anything useful from clobbers. */
1743 if (gimple_clobber_p (def_stmt
))
1746 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1747 from that definition.
1749 if (is_gimple_reg_type (vr
->type
)
1750 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1751 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1752 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1753 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1755 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1757 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1758 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
);
1759 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1760 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1761 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1763 && operand_equal_p (base
, base2
, 0)
1764 && offset2
<= offset
1765 && offset2
+ size2
>= offset
+ maxsize
)
1767 tree val
= build_zero_cst (vr
->type
);
1768 return vn_reference_lookup_or_insert_for_pieces
1769 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1773 /* 2) Assignment from an empty CONSTRUCTOR. */
1774 else if (is_gimple_reg_type (vr
->type
)
1775 && gimple_assign_single_p (def_stmt
)
1776 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1777 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1780 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1781 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1782 &offset2
, &size2
, &maxsize2
);
1784 && operand_equal_p (base
, base2
, 0)
1785 && offset2
<= offset
1786 && offset2
+ size2
>= offset
+ maxsize
)
1788 tree val
= build_zero_cst (vr
->type
);
1789 return vn_reference_lookup_or_insert_for_pieces
1790 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1794 /* 3) Assignment from a constant. We can use folds native encode/interpret
1795 routines to extract the assigned bits. */
1796 else if (vn_walk_kind
== VN_WALKREWRITE
1797 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1798 && ref
->size
== maxsize
1799 && maxsize
% BITS_PER_UNIT
== 0
1800 && offset
% BITS_PER_UNIT
== 0
1801 && is_gimple_reg_type (vr
->type
)
1802 && gimple_assign_single_p (def_stmt
)
1803 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
1806 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1807 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1808 &offset2
, &size2
, &maxsize2
);
1810 && maxsize2
== size2
1811 && size2
% BITS_PER_UNIT
== 0
1812 && offset2
% BITS_PER_UNIT
== 0
1813 && operand_equal_p (base
, base2
, 0)
1814 && offset2
<= offset
1815 && offset2
+ size2
>= offset
+ maxsize
)
1817 /* We support up to 512-bit values (for V8DFmode). */
1818 unsigned char buffer
[64];
1821 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1822 buffer
, sizeof (buffer
));
1825 tree val
= native_interpret_expr (vr
->type
,
1827 + ((offset
- offset2
)
1829 ref
->size
/ BITS_PER_UNIT
);
1831 return vn_reference_lookup_or_insert_for_pieces
1832 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1837 /* 4) Assignment from an SSA name which definition we may be able
1838 to access pieces from. */
1839 else if (ref
->size
== maxsize
1840 && is_gimple_reg_type (vr
->type
)
1841 && gimple_assign_single_p (def_stmt
)
1842 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
1844 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
1845 gimple
*def_stmt2
= SSA_NAME_DEF_STMT (rhs1
);
1846 if (is_gimple_assign (def_stmt2
)
1847 && (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
1848 || gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
)
1849 && types_compatible_p (vr
->type
, TREE_TYPE (TREE_TYPE (rhs1
))))
1852 HOST_WIDE_INT offset2
, size2
, maxsize2
, off
;
1853 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1854 &offset2
, &size2
, &maxsize2
);
1855 off
= offset
- offset2
;
1857 && maxsize2
== size2
1858 && operand_equal_p (base
, base2
, 0)
1859 && offset2
<= offset
1860 && offset2
+ size2
>= offset
+ maxsize
)
1862 tree val
= NULL_TREE
;
1864 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1
))));
1865 if (gimple_assign_rhs_code (def_stmt2
) == COMPLEX_EXPR
)
1868 val
= gimple_assign_rhs1 (def_stmt2
);
1869 else if (off
== elsz
)
1870 val
= gimple_assign_rhs2 (def_stmt2
);
1872 else if (gimple_assign_rhs_code (def_stmt2
) == CONSTRUCTOR
1875 tree ctor
= gimple_assign_rhs1 (def_stmt2
);
1876 unsigned i
= off
/ elsz
;
1877 if (i
< CONSTRUCTOR_NELTS (ctor
))
1879 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, i
);
1880 if (TREE_CODE (TREE_TYPE (rhs1
)) == VECTOR_TYPE
)
1882 if (TREE_CODE (TREE_TYPE (elt
->value
))
1889 return vn_reference_lookup_or_insert_for_pieces
1890 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1895 /* 5) For aggregate copies translate the reference through them if
1896 the copy kills ref. */
1897 else if (vn_walk_kind
== VN_WALKREWRITE
1898 && gimple_assign_single_p (def_stmt
)
1899 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1900 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1901 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1904 HOST_WIDE_INT maxsize2
;
1906 auto_vec
<vn_reference_op_s
> rhs
;
1907 vn_reference_op_t vro
;
1913 /* See if the assignment kills REF. */
1914 base2
= ao_ref_base (&lhs_ref
);
1915 maxsize2
= lhs_ref
.max_size
;
1918 && (TREE_CODE (base
) != MEM_REF
1919 || TREE_CODE (base2
) != MEM_REF
1920 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
1921 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
1922 TREE_OPERAND (base2
, 1))))
1923 || !stmt_kills_ref_p (def_stmt
, ref
))
1926 /* Find the common base of ref and the lhs. lhs_ops already
1927 contains valueized operands for the lhs. */
1928 i
= vr
->operands
.length () - 1;
1929 j
= lhs_ops
.length () - 1;
1930 while (j
>= 0 && i
>= 0
1931 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
1937 /* ??? The innermost op should always be a MEM_REF and we already
1938 checked that the assignment to the lhs kills vr. Thus for
1939 aggregate copies using char[] types the vn_reference_op_eq
1940 may fail when comparing types for compatibility. But we really
1941 don't care here - further lookups with the rewritten operands
1942 will simply fail if we messed up types too badly. */
1943 HOST_WIDE_INT extra_off
= 0;
1944 if (j
== 0 && i
>= 0
1945 && lhs_ops
[0].opcode
== MEM_REF
1946 && lhs_ops
[0].off
!= -1)
1948 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
1950 else if (vr
->operands
[i
].opcode
== MEM_REF
1951 && vr
->operands
[i
].off
!= -1)
1953 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
1958 /* i now points to the first additional op.
1959 ??? LHS may not be completely contained in VR, one or more
1960 VIEW_CONVERT_EXPRs could be in its way. We could at least
1961 try handling outermost VIEW_CONVERT_EXPRs. */
1965 /* Now re-write REF to be based on the rhs of the assignment. */
1966 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1968 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1971 if (rhs
.length () < 2
1972 || rhs
[0].opcode
!= MEM_REF
1973 || rhs
[0].off
== -1)
1975 rhs
[0].off
+= extra_off
;
1976 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
1977 build_int_cst (TREE_TYPE (rhs
[0].op0
),
1981 /* We need to pre-pend vr->operands[0..i] to rhs. */
1982 vec
<vn_reference_op_s
> old
= vr
->operands
;
1983 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
1985 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
1986 if (old
== shared_lookup_references
)
1987 shared_lookup_references
= vr
->operands
;
1990 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
1991 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
1992 vr
->operands
[i
+ 1 + j
] = *vro
;
1993 vr
->operands
= valueize_refs (vr
->operands
);
1994 if (old
== shared_lookup_references
)
1995 shared_lookup_references
= vr
->operands
;
1996 vr
->hashcode
= vn_reference_compute_hash (vr
);
1998 /* Try folding the new reference to a constant. */
1999 tree val
= fully_constant_vn_reference_p (vr
);
2001 return vn_reference_lookup_or_insert_for_pieces
2002 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2004 /* Adjust *ref from the new operands. */
2005 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2007 /* This can happen with bitfields. */
2008 if (ref
->size
!= r
.size
)
2012 /* Do not update last seen VUSE after translating. */
2013 last_vuse_ptr
= NULL
;
2015 /* Keep looking for the adjusted *REF / VR pair. */
2019 /* 6) For memcpy copies translate the reference through them if
2020 the copy kills ref. */
2021 else if (vn_walk_kind
== VN_WALKREWRITE
2022 && is_gimple_reg_type (vr
->type
)
2023 /* ??? Handle BCOPY as well. */
2024 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
2025 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
2026 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
2027 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2028 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
2029 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
2030 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
2031 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
2035 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
2036 vn_reference_op_s op
;
2040 /* Only handle non-variable, addressable refs. */
2041 if (ref
->size
!= maxsize
2042 || offset
% BITS_PER_UNIT
!= 0
2043 || ref
->size
% BITS_PER_UNIT
!= 0)
2046 /* Extract a pointer base and an offset for the destination. */
2047 lhs
= gimple_call_arg (def_stmt
, 0);
2049 if (TREE_CODE (lhs
) == SSA_NAME
)
2051 lhs
= SSA_VAL (lhs
);
2052 if (TREE_CODE (lhs
) == SSA_NAME
)
2054 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
2055 if (gimple_assign_single_p (def_stmt
)
2056 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2057 lhs
= gimple_assign_rhs1 (def_stmt
);
2060 if (TREE_CODE (lhs
) == ADDR_EXPR
)
2062 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
2066 if (TREE_CODE (tem
) == MEM_REF
2067 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2069 lhs
= TREE_OPERAND (tem
, 0);
2070 if (TREE_CODE (lhs
) == SSA_NAME
)
2071 lhs
= SSA_VAL (lhs
);
2072 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2074 else if (DECL_P (tem
))
2075 lhs
= build_fold_addr_expr (tem
);
2079 if (TREE_CODE (lhs
) != SSA_NAME
2080 && TREE_CODE (lhs
) != ADDR_EXPR
)
2083 /* Extract a pointer base and an offset for the source. */
2084 rhs
= gimple_call_arg (def_stmt
, 1);
2086 if (TREE_CODE (rhs
) == SSA_NAME
)
2087 rhs
= SSA_VAL (rhs
);
2088 if (TREE_CODE (rhs
) == ADDR_EXPR
)
2090 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
2094 if (TREE_CODE (tem
) == MEM_REF
2095 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2097 rhs
= TREE_OPERAND (tem
, 0);
2098 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2100 else if (DECL_P (tem
))
2101 rhs
= build_fold_addr_expr (tem
);
2105 if (TREE_CODE (rhs
) != SSA_NAME
2106 && TREE_CODE (rhs
) != ADDR_EXPR
)
2109 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
2111 /* The bases of the destination and the references have to agree. */
2112 if ((TREE_CODE (base
) != MEM_REF
2114 || (TREE_CODE (base
) == MEM_REF
2115 && (TREE_OPERAND (base
, 0) != lhs
2116 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
2118 && (TREE_CODE (lhs
) != ADDR_EXPR
2119 || TREE_OPERAND (lhs
, 0) != base
)))
2122 at
= offset
/ BITS_PER_UNIT
;
2123 if (TREE_CODE (base
) == MEM_REF
)
2124 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
2125 /* If the access is completely outside of the memcpy destination
2126 area there is no aliasing. */
2127 if (lhs_offset
>= at
+ maxsize
/ BITS_PER_UNIT
2128 || lhs_offset
+ copy_size
<= at
)
2130 /* And the access has to be contained within the memcpy destination. */
2132 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
2135 /* Make room for 2 operands in the new reference. */
2136 if (vr
->operands
.length () < 2)
2138 vec
<vn_reference_op_s
> old
= vr
->operands
;
2139 vr
->operands
.safe_grow_cleared (2);
2140 if (old
== shared_lookup_references
2141 && vr
->operands
!= old
)
2142 shared_lookup_references
= vr
->operands
;
2145 vr
->operands
.truncate (2);
2147 /* The looked-through reference is a simple MEM_REF. */
2148 memset (&op
, 0, sizeof (op
));
2150 op
.opcode
= MEM_REF
;
2151 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2152 op
.off
= at
- lhs_offset
+ rhs_offset
;
2153 vr
->operands
[0] = op
;
2154 op
.type
= TREE_TYPE (rhs
);
2155 op
.opcode
= TREE_CODE (rhs
);
2158 vr
->operands
[1] = op
;
2159 vr
->hashcode
= vn_reference_compute_hash (vr
);
2161 /* Adjust *ref from the new operands. */
2162 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2164 /* This can happen with bitfields. */
2165 if (ref
->size
!= r
.size
)
2169 /* Do not update last seen VUSE after translating. */
2170 last_vuse_ptr
= NULL
;
2172 /* Keep looking for the adjusted *REF / VR pair. */
2176 /* Bail out and stop walking. */
/* Lookup a reference operation by its parts, in the current hash table.
2181 Returns the resulting value number if it exists in the hash table,
2182 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2183 vn_reference_t stored in the hashtable if something is found. */
2186 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2187 vec
<vn_reference_op_s
> operands
,
2188 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2190 struct vn_reference_s vr1
;
2198 vr1
.vuse
= vuse_ssa_val (vuse
);
2199 shared_lookup_references
.truncate (0);
2200 shared_lookup_references
.safe_grow (operands
.length ());
2201 memcpy (shared_lookup_references
.address (),
2202 operands
.address (),
2203 sizeof (vn_reference_op_s
)
2204 * operands
.length ());
2205 vr1
.operands
= operands
= shared_lookup_references
2206 = valueize_refs (shared_lookup_references
);
2209 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2210 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2213 vn_reference_lookup_1 (&vr1
, vnresult
);
2215 && kind
!= VN_NOWALK
2219 vn_walk_kind
= kind
;
2220 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2222 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2223 vn_reference_lookup_2
,
2224 vn_reference_lookup_3
,
2225 vuse_ssa_val
, &vr1
);
2226 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2230 return (*vnresult
)->result
;
2235 /* Lookup OP in the current hash table, and return the resulting value
2236 number if it exists in the hash table. Return NULL_TREE if it does
2237 not exist in the hash table or if the result field of the structure
was NULL.  VNRESULT will be filled in with the vn_reference_t
2239 stored in the hashtable if one exists. */
2242 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2243 vn_reference_t
*vnresult
)
2245 vec
<vn_reference_op_s
> operands
;
2246 struct vn_reference_s vr1
;
2248 bool valuezied_anything
;
2253 vr1
.vuse
= vuse_ssa_val (vuse
);
2254 vr1
.operands
= operands
2255 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2256 vr1
.type
= TREE_TYPE (op
);
2257 vr1
.set
= get_alias_set (op
);
2258 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2259 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2262 if (kind
!= VN_NOWALK
2265 vn_reference_t wvnresult
;
2267 /* Make sure to use a valueized reference if we valueized anything.
2268 Otherwise preserve the full reference for advanced TBAA. */
2269 if (!valuezied_anything
2270 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2272 ao_ref_init (&r
, op
);
2273 vn_walk_kind
= kind
;
2275 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2276 vn_reference_lookup_2
,
2277 vn_reference_lookup_3
,
2278 vuse_ssa_val
, &vr1
);
2279 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2283 *vnresult
= wvnresult
;
2284 return wvnresult
->result
;
2290 return vn_reference_lookup_1 (&vr1
, vnresult
);
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
2313 /* Insert OP into the current hash table with a value number of
2314 RESULT, and return the resulting reference structure we created. */
2316 static vn_reference_t
2317 vn_reference_insert (tree op
, tree result
, tree vuse
, tree vdef
)
2319 vn_reference_s
**slot
;
2323 vr1
= current_info
->references_pool
->allocate ();
2324 if (TREE_CODE (result
) == SSA_NAME
)
2325 vr1
->value_id
= VN_INFO (result
)->value_id
;
2327 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
2328 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2329 vr1
->operands
= valueize_shared_reference_ops_from_ref (op
, &tem
).copy ();
2330 vr1
->type
= TREE_TYPE (op
);
2331 vr1
->set
= get_alias_set (op
);
2332 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2333 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
2334 vr1
->result_vdef
= vdef
;
2336 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */
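  /* For example, a store such as *p_1 = x_2 (names here purely for
     illustration) may first be looked up keyed on its incoming VUSE,
     fail, and then be inserted keyed on its VDEF; iterating an SCC can
     reach this insert a second time with identical operands, in which
     case we simply reuse the slot.  */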
  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */
2360 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
2361 vec
<vn_reference_op_s
> operands
,
2362 tree result
, unsigned int value_id
)
2365 vn_reference_s
**slot
;
2368 vr1
= current_info
->references_pool
->allocate ();
2369 vr1
->value_id
= value_id
;
2370 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2371 vr1
->operands
= valueize_refs (operands
);
2374 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2375 if (result
&& TREE_CODE (result
) == SSA_NAME
)
2376 result
= SSA_VAL (result
);
2377 vr1
->result
= result
;
2379 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
2382 /* At this point we should have all the things inserted that we have
2383 seen before, and we should never try inserting something that
2385 gcc_assert (!*slot
);
2387 free_reference (*slot
);
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
2476 /* Return the number of operands for a vn_nary ops structure from STMT. */
2479 vn_nary_length_from_stmt (gimple
*stmt
)
2481 switch (gimple_assign_rhs_code (stmt
))
2485 case VIEW_CONVERT_EXPR
:
2492 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2495 return gimple_num_ops (stmt
) - 1;
2499 /* Initialize VNO from STMT. */
2502 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple
*stmt
)
2506 vno
->opcode
= gimple_assign_rhs_code (stmt
);
2507 vno
->type
= gimple_expr_type (stmt
);
2508 switch (vno
->opcode
)
2512 case VIEW_CONVERT_EXPR
:
2514 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2519 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2520 vno
->op
[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 1);
2521 vno
->op
[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 2);
2525 vno
->length
= CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2526 for (i
= 0; i
< vno
->length
; ++i
)
2527 vno
->op
[i
] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt
), i
)->value
;
2531 gcc_checking_assert (!gimple_assign_single_p (stmt
));
2532 vno
->length
= gimple_num_ops (stmt
) - 1;
2533 for (i
= 0; i
< vno
->length
; ++i
)
2534 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
2538 /* Compute the hashcode for VNO and look for it in the hash table;
2539 return the resulting value number if it exists in the hash table.
2540 Return NULL_TREE if it does not exist in the hash table or if the
2541 result field of the operation is NULL. VNRESULT will contain the
2542 vn_nary_op_t from the hashtable if it exists. */
2545 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
2547 vn_nary_op_s
**slot
;
2552 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
2553 slot
= current_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2555 if (!slot
&& current_info
== optimistic_info
)
2556 slot
= valid_info
->nary
->find_slot_with_hash (vno
, vno
->hashcode
,
2562 return (*slot
)->result
;
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */
2572 vn_nary_op_lookup_pieces (unsigned int length
, enum tree_code code
,
2573 tree type
, tree
*ops
, vn_nary_op_t
*vnresult
)
2575 vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s
,
2576 sizeof_vn_nary_op (length
));
2577 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2578 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2581 /* Lookup OP in the current hash table, and return the resulting value
2582 number if it exists in the hash table. Return NULL_TREE if it does
2583 not exist in the hash table or if the result field of the operation
2584 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2588 vn_nary_op_lookup (tree op
, vn_nary_op_t
*vnresult
)
2591 = XALLOCAVAR (struct vn_nary_op_s
,
2592 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op
))));
2593 init_vn_nary_op_from_op (vno1
, op
);
2594 return vn_nary_op_lookup_1 (vno1
, vnresult
);
2597 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2598 value number if it exists in the hash table. Return NULL_TREE if
2599 it does not exist in the hash table. VNRESULT will contain the
2600 vn_nary_op_t from the hashtable if it exists. */
2603 vn_nary_op_lookup_stmt (gimple
*stmt
, vn_nary_op_t
*vnresult
)
2606 = XALLOCAVAR (struct vn_nary_op_s
,
2607 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt
)));
2608 init_vn_nary_op_from_stmt (vno1
, stmt
);
2609 return vn_nary_op_lookup_1 (vno1
, vnresult
);
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */
2660 vn_nary_op_insert_pieces (unsigned int length
, enum tree_code code
,
2661 tree type
, tree
*ops
,
2662 tree result
, unsigned int value_id
)
2664 vn_nary_op_t vno1
= alloc_vn_nary_op (length
, result
, value_id
);
2665 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
2666 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2669 /* Insert OP into the current hash table with a value number of
2670 RESULT. Return the vn_nary_op_t structure we created and put in
2674 vn_nary_op_insert (tree op
, tree result
)
2676 unsigned length
= TREE_CODE_LENGTH (TREE_CODE (op
));
2679 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
2680 init_vn_nary_op_from_op (vno1
, op
);
2681 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
2684 /* Insert the rhs of STMT into the current hash table with a value number of
2688 vn_nary_op_insert_stmt (gimple
*stmt
, tree result
)
2691 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt
),
2692 result
, VN_INFO (result
)->value_id
);
2693 init_vn_nary_op_from_stmt (vno1
, stmt
);
2694 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (vp1->block->index);

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
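  /* For instance, two PHIs with arguments <1, 2> in the same block,
     one of type int and one of type long, must not value-number the
     same; their constant arguments hash identically, so mixing the
     type into the hash lets the equality check in vn_phi_eq keep
     them apart.  */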
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values; they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
	{
	  tree phi2op = vp2->phiargs[i];
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}

static vec<tree> shared_lookup_phiargs;
2764 /* Lookup PHI in the current hash table, and return the resulting
2765 value number if it exists in the hash table. Return NULL_TREE if
2766 it does not exist in the hash table. */
2769 vn_phi_lookup (gimple
*phi
)
2772 struct vn_phi_s vp1
;
2776 shared_lookup_phiargs
.truncate (0);
2777 shared_lookup_phiargs
.safe_grow (gimple_phi_num_args (phi
));
2779 /* Canonicalize the SSA_NAME's to their value number. */
2780 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
2782 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
2783 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2784 shared_lookup_phiargs
[e
->dest_idx
] = def
;
2786 vp1
.type
= TREE_TYPE (gimple_phi_result (phi
));
2787 vp1
.phiargs
= shared_lookup_phiargs
;
2788 vp1
.block
= gimple_bb (phi
);
2789 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2790 slot
= current_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2792 if (!slot
&& current_info
== optimistic_info
)
2793 slot
= valid_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
2797 return (*slot
)->result
;
2800 /* Insert PHI into the current hash table with a value number of
2804 vn_phi_insert (gimple
*phi
, tree result
)
2807 vn_phi_t vp1
= current_info
->phis_pool
->allocate ();
2808 vec
<tree
> args
= vNULL
;
2812 args
.safe_grow (gimple_phi_num_args (phi
));
2814 /* Canonicalize the SSA_NAME's to their value number. */
2815 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
2817 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
2818 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2819 args
[e
->dest_idx
] = def
;
2821 vp1
->value_id
= VN_INFO (result
)->value_id
;
2822 vp1
->type
= TREE_TYPE (gimple_phi_result (phi
));
2823 vp1
->phiargs
= args
;
2824 vp1
->block
= gimple_bb (phi
);
2825 vp1
->result
= result
;
2826 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
2828 slot
= current_info
->phis
->find_slot_with_hash (vp1
, vp1
->hashcode
, INSERT
);
  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  unsigned int i;
  tree var;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}
2854 /* Set the value number of FROM to TO, return true if it has changed
2858 set_ssa_val_to (tree from
, tree to
)
2860 tree currval
= SSA_VAL (from
);
2861 HOST_WIDE_INT toff
, coff
;
2863 /* The only thing we allow as value numbers are ssa_names
2864 and invariants. So assert that here. We don't allow VN_TOP
2865 as visiting a stmt should produce a value-number other than
2867 ??? Still VN_TOP can happen for unreachable code, so force
2868 it to varying in that case. Not all code is prepared to
2869 get VN_TOP on valueization. */
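  /* The value-number lattice for a name is intended to move only
     downwards: VN_TOP (undefined) -> some SSA name or invariant ->
     the name itself (VARYING).  E.g. x_1 may first be optimistically
     numbered to the constant 4 and later be forced back to x_1 itself
     if iteration proves the equivalence wrong.  */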
2872 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2873 fprintf (dump_file
, "Forcing value number to varying on "
2874 "receiving VN_TOP\n");
2878 gcc_assert (to
!= NULL_TREE
2879 && ((TREE_CODE (to
) == SSA_NAME
2880 && (to
== from
|| SSA_VAL (to
) == to
))
2881 || is_gimple_min_invariant (to
)));
2885 if (currval
== from
)
2887 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2889 fprintf (dump_file
, "Not changing value number of ");
2890 print_generic_expr (dump_file
, from
, 0);
2891 fprintf (dump_file
, " from VARYING to ");
2892 print_generic_expr (dump_file
, to
, 0);
2893 fprintf (dump_file
, "\n");
2897 else if (TREE_CODE (to
) == SSA_NAME
2898 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
2902 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2904 fprintf (dump_file
, "Setting value number of ");
2905 print_generic_expr (dump_file
, from
, 0);
2906 fprintf (dump_file
, " to ");
2907 print_generic_expr (dump_file
, to
, 0);
2911 && !operand_equal_p (currval
, to
, 0)
2912 /* ??? For addresses involving volatile objects or types operand_equal_p
2913 does not reliably detect ADDR_EXPRs as equal. We know we are only
2914 getting invariant gimple addresses here, so can use
2915 get_addr_base_and_unit_offset to do this comparison. */
2916 && !(TREE_CODE (currval
) == ADDR_EXPR
2917 && TREE_CODE (to
) == ADDR_EXPR
2918 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval
, 0), &coff
)
2919 == get_addr_base_and_unit_offset (TREE_OPERAND (to
, 0), &toff
))
2922 VN_INFO (from
)->valnum
= to
;
2923 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2924 fprintf (dump_file
, " (changed)\n");
2927 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2928 fprintf (dump_file
, "\n");
2932 /* Mark as processed all the definitions in the defining stmt of USE, or
2936 mark_use_processed (tree use
)
2940 gimple
*stmt
= SSA_NAME_DEF_STMT (use
);
2942 if (SSA_NAME_IS_DEFAULT_DEF (use
) || gimple_code (stmt
) == GIMPLE_PHI
)
2944 VN_INFO (use
)->use_processed
= true;
2948 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2950 tree def
= DEF_FROM_PTR (defp
);
2952 VN_INFO (def
)->use_processed
= true;
/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */
2960 defs_to_varying (gimple
*stmt
)
2962 bool changed
= false;
2966 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2968 tree def
= DEF_FROM_PTR (defp
);
2969 changed
|= set_ssa_val_to (def
, def
);
static bool expr_has_constants (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  /* And finally valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
2993 /* Visit a nary operator RHS, value number it, and return true if the
2994 value number of LHS has changed as a result. */
2997 visit_nary_op (tree lhs
, gimple
*stmt
)
2999 bool changed
= false;
3000 tree result
= vn_nary_op_lookup_stmt (stmt
, NULL
);
3003 changed
= set_ssa_val_to (lhs
, result
);
3006 changed
= set_ssa_val_to (lhs
, lhs
);
3007 vn_nary_op_insert_stmt (stmt
, lhs
);
3013 /* Visit a call STMT storing into LHS. Return true if the value number
3014 of the LHS has changed as a result. */
3017 visit_reference_op_call (tree lhs
, gcall
*stmt
)
3019 bool changed
= false;
3020 struct vn_reference_s vr1
;
3021 vn_reference_t vnresult
= NULL
;
3022 tree vdef
= gimple_vdef (stmt
);
3024 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3025 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
3028 vn_reference_lookup_call (stmt
, &vnresult
, &vr1
);
3031 if (vnresult
->result_vdef
&& vdef
)
3032 changed
|= set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3034 if (!vnresult
->result
&& lhs
)
3035 vnresult
->result
= lhs
;
3037 if (vnresult
->result
&& lhs
)
3039 changed
|= set_ssa_val_to (lhs
, vnresult
->result
);
3041 if (VN_INFO (vnresult
->result
)->has_constants
)
3042 VN_INFO (lhs
)->has_constants
= true;
3048 vn_reference_s
**slot
;
3050 changed
|= set_ssa_val_to (vdef
, vdef
);
3052 changed
|= set_ssa_val_to (lhs
, lhs
);
3053 vr2
= current_info
->references_pool
->allocate ();
3054 vr2
->vuse
= vr1
.vuse
;
3055 /* As we are not walking the virtual operand chain we know the
3056 shared_lookup_references are still original so we can re-use
3058 vr2
->operands
= vr1
.operands
.copy ();
3059 vr2
->type
= vr1
.type
;
3061 vr2
->hashcode
= vr1
.hashcode
;
3063 vr2
->result_vdef
= vdef
;
3064 slot
= current_info
->references
->find_slot_with_hash (vr2
, vr2
->hashcode
,
3066 gcc_assert (!*slot
);
3073 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3074 and return true if the value number of the LHS has changed as a result. */
3077 visit_reference_op_load (tree lhs
, tree op
, gimple
*stmt
)
3079 bool changed
= false;
3083 last_vuse
= gimple_vuse (stmt
);
3084 last_vuse_ptr
= &last_vuse
;
3085 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
3086 default_vn_walk_kind
, NULL
);
3087 last_vuse_ptr
= NULL
;
3089 /* We handle type-punning through unions by value-numbering based
3090 on offset and size of the access. Be prepared to handle a
3091 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
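  /* E.g. for a union { int i; float f; } written via the float member
     and read back via the int member, the load can reuse the stored
     value, but only wrapped as VIEW_CONVERT_EXPR <int> (f_1) (names
     illustrative) to repair the type mismatch.  */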
3093 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
3095 /* We will be setting the value number of lhs to the value number
3096 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3097 So first simplify and lookup this expression to see if it
3098 is already available. */
3099 tree val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (op
), result
);
3100 if ((CONVERT_EXPR_P (val
)
3101 || TREE_CODE (val
) == VIEW_CONVERT_EXPR
)
3102 && TREE_CODE (TREE_OPERAND (val
, 0)) == SSA_NAME
)
3104 tree tem
= vn_get_expr_for (TREE_OPERAND (val
, 0));
3105 if ((CONVERT_EXPR_P (tem
)
3106 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
)
3107 && (tem
= fold_unary_ignore_overflow (TREE_CODE (val
),
3108 TREE_TYPE (val
), tem
)))
3112 if (!is_gimple_min_invariant (val
)
3113 && TREE_CODE (val
) != SSA_NAME
)
3114 result
= vn_nary_op_lookup (val
, NULL
);
3115 /* If the expression is not yet available, value-number lhs to
3116 a new SSA_NAME we create. */
3119 result
= make_temp_ssa_name (TREE_TYPE (lhs
), gimple_build_nop (),
3121 /* Initialize value-number information properly. */
3122 VN_INFO_GET (result
)->valnum
= result
;
3123 VN_INFO (result
)->value_id
= get_next_value_id ();
3124 VN_INFO (result
)->expr
= val
;
3125 VN_INFO (result
)->has_constants
= expr_has_constants (val
);
3126 VN_INFO (result
)->needs_insertion
= true;
3127 /* As all "inserted" statements are singleton SCCs, insert
3128 to the valid table. This is strictly needed to
3129 avoid re-generating new value SSA_NAMEs for the same
3130 expression during SCC iteration over and over (the
3131 optimistic table gets cleared after each iteration).
3132 We do not need to insert into the optimistic table, as
3133 lookups there will fall back to the valid table. */
3134 if (current_info
== optimistic_info
)
3136 current_info
= valid_info
;
3137 vn_nary_op_insert (val
, result
);
3138 current_info
= optimistic_info
;
3141 vn_nary_op_insert (val
, result
);
3142 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3144 fprintf (dump_file
, "Inserting name ");
3145 print_generic_expr (dump_file
, result
, 0);
3146 fprintf (dump_file
, " for expression ");
3147 print_generic_expr (dump_file
, val
, 0);
3148 fprintf (dump_file
, "\n");
3155 changed
= set_ssa_val_to (lhs
, result
);
3156 if (TREE_CODE (result
) == SSA_NAME
3157 && VN_INFO (result
)->has_constants
)
3159 VN_INFO (lhs
)->expr
= VN_INFO (result
)->expr
;
3160 VN_INFO (lhs
)->has_constants
= true;
3165 changed
= set_ssa_val_to (lhs
, lhs
);
3166 vn_reference_insert (op
, lhs
, last_vuse
, NULL_TREE
);
3173 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3174 and return true if the value number of the LHS has changed as a result. */
3177 visit_reference_op_store (tree lhs
, tree op
, gimple
*stmt
)
3179 bool changed
= false;
3180 vn_reference_t vnresult
= NULL
;
3181 tree result
, assign
;
3182 bool resultsame
= false;
3183 tree vuse
= gimple_vuse (stmt
);
3184 tree vdef
= gimple_vdef (stmt
);
3186 if (TREE_CODE (op
) == SSA_NAME
)
  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered
     to those vuse versions, since they represent the same memory state
     after this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
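  /* For example (SSA names illustrative), given

       # .MEM_3 = VDEF <.MEM_2>
       a = x_1;
       ...
       # .MEM_5 = VDEF <.MEM_4>
       a = x_1;

     if the lookup of the second store in memory state .MEM_4 finds the
     first one with the same address and value, .MEM_5 is value numbered
     to the value of .MEM_4, i.e. the store changes nothing.  */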
3205 result
= vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, NULL
);
3209 if (TREE_CODE (result
) == SSA_NAME
)
3210 result
= SSA_VAL (result
);
3211 resultsame
= expressions_equal_p (result
, op
);
3214 if ((!result
|| !resultsame
)
3215 /* Only perform the following when being called from PRE
3216 which embeds tail merging. */
3217 && default_vn_walk_kind
== VN_WALK
)
3219 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3220 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
);
3223 VN_INFO (vdef
)->use_processed
= true;
3224 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3228 if (!result
|| !resultsame
)
3230 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3232 fprintf (dump_file
, "No store match\n");
3233 fprintf (dump_file
, "Value numbering store ");
3234 print_generic_expr (dump_file
, lhs
, 0);
3235 fprintf (dump_file
, " to ");
3236 print_generic_expr (dump_file
, op
, 0);
3237 fprintf (dump_file
, "\n");
3239 /* Have to set value numbers before insert, since insert is
3240 going to valueize the references in-place. */
3243 changed
|= set_ssa_val_to (vdef
, vdef
);
3246 /* Do not insert structure copies into the tables. */
3247 if (is_gimple_min_invariant (op
)
3248 || is_gimple_reg (op
))
3249 vn_reference_insert (lhs
, op
, vdef
, NULL
);
3251 /* Only perform the following when being called from PRE
3252 which embeds tail merging. */
3253 if (default_vn_walk_kind
== VN_WALK
)
3255 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3256 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
3261 /* We had a match, so value number the vdef to have the value
3262 number of the vuse it came from. */
3264 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3265 fprintf (dump_file
, "Store matched earlier value,"
3266 "value numbering store vdefs to matching vuses.\n");
3268 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
3274 /* Visit and value number PHI, return true if the value number
3278 visit_phi (gimple
*phi
)
3280 bool changed
= false;
3282 tree sameval
= VN_TOP
;
3283 bool allsame
= true;
3285 /* TODO: We could check for this in init_sccvn, and replace this
3286 with a gcc_assert. */
3287 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
3288 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3290 /* See if all non-TOP arguments have the same value. TOP is
3291 equivalent to everything, so we can ignore it. */
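  /* E.g. x_3 = PHI <y_1(5), VN_TOP(6), y_1(7)> (edges illustrative) has
     all non-TOP arguments equal to y_1, so x_3 can be value numbered to
     y_1; a not-yet-visited (TOP) argument cannot invalidate that.  */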
3294 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3295 if (e
->flags
& EDGE_EXECUTABLE
)
3297 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3299 if (TREE_CODE (def
) == SSA_NAME
)
3300 def
= SSA_VAL (def
);
3303 if (sameval
== VN_TOP
)
3305 else if (!expressions_equal_p (def
, sameval
))
3312 /* If none of the edges was executable or all incoming values are
3313 undefined keep the value-number at VN_TOP. */
3314 if (sameval
== VN_TOP
)
3315 return set_ssa_val_to (PHI_RESULT (phi
), VN_TOP
);
3317 /* First see if it is equivalent to a phi node in this block. We prefer
3318 this as it allows IV elimination - see PRs 66502 and 67167. */
3319 result
= vn_phi_lookup (phi
);
3321 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
3322 /* Otherwise all value numbered to the same value, the phi node has that
3325 changed
= set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3328 vn_phi_insert (phi
, PHI_RESULT (phi
));
3329 VN_INFO (PHI_RESULT (phi
))->has_constants
= false;
3330 VN_INFO (PHI_RESULT (phi
))->expr
= PHI_RESULT (phi
);
3331 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3337 /* Return true if EXPR contains constants. */
3340 expr_has_constants (tree expr
)
3342 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
3345 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0));
3348 return is_gimple_min_invariant (TREE_OPERAND (expr
, 0))
3349 || is_gimple_min_invariant (TREE_OPERAND (expr
, 1));
3350 /* Constants inside reference ops are rarely interesting, but
3351 it can take a lot of looking to find them. */
3353 case tcc_declaration
:
3356 return is_gimple_min_invariant (expr
);
3361 /* Return true if STMT contains constants. */
3364 stmt_has_constants (gimple
*stmt
)
3368 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
3371 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
)))
3373 case GIMPLE_TERNARY_RHS
:
3374 tem
= gimple_assign_rhs3 (stmt
);
3375 if (TREE_CODE (tem
) == SSA_NAME
)
3376 tem
= SSA_VAL (tem
);
3377 if (is_gimple_min_invariant (tem
))
3381 case GIMPLE_BINARY_RHS
:
3382 tem
= gimple_assign_rhs2 (stmt
);
3383 if (TREE_CODE (tem
) == SSA_NAME
)
3384 tem
= SSA_VAL (tem
);
3385 if (is_gimple_min_invariant (tem
))
3389 case GIMPLE_SINGLE_RHS
:
3390 /* Constants inside reference ops are rarely interesting, but
3391 it can take a lot of looking to find them. */
3392 case GIMPLE_UNARY_RHS
:
3393 tem
= gimple_assign_rhs1 (stmt
);
3394 if (TREE_CODE (tem
) == SSA_NAME
)
3395 tem
= SSA_VAL (tem
);
3396 return is_gimple_min_invariant (tem
);
3404 /* Simplify the binary expression RHS, and return the result if
3408 simplify_binary_expression (gimple
*stmt
)
3410 tree result
= NULL_TREE
;
3411 tree op0
= gimple_assign_rhs1 (stmt
);
3412 tree op1
= gimple_assign_rhs2 (stmt
);
3413 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3415 /* This will not catch every single case we could combine, but will
3416 catch those with constants. The goal here is to simultaneously
3417 combine constants between expressions, but avoid infinite
3418 expansion of expressions during simplification. */
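  /* E.g. for a_2 = b_1 + 1; c_3 = a_2 + 2; looking up a_2's
     constant-containing expression lets fold_binary turn c_3's RHS
     into b_1 + 3, while chains without constants are left alone so
     folding cannot blow up the expression.  */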
3419 op0
= vn_valueize (op0
);
3420 if (TREE_CODE (op0
) == SSA_NAME
3421 && (VN_INFO (op0
)->has_constants
3422 || TREE_CODE_CLASS (code
) == tcc_comparison
3423 || code
== COMPLEX_EXPR
))
3424 op0
= vn_get_expr_for (op0
);
3426 op1
= vn_valueize (op1
);
3427 if (TREE_CODE (op1
) == SSA_NAME
3428 && (VN_INFO (op1
)->has_constants
3429 || code
== COMPLEX_EXPR
))
3430 op1
= vn_get_expr_for (op1
);
  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
3434 if (code
== POINTER_PLUS_EXPR
3435 && tree_fits_uhwi_p (op1
)
3436 && TREE_CODE (op0
) == ADDR_EXPR
3437 && is_gimple_min_invariant (op0
))
3438 return build_invariant_address (TREE_TYPE (op0
),
3439 TREE_OPERAND (op0
, 0),
3440 tree_to_uhwi (op1
));
3442 /* Avoid folding if nothing changed. */
3443 if (op0
== gimple_assign_rhs1 (stmt
)
3444 && op1
== gimple_assign_rhs2 (stmt
))
3447 fold_defer_overflow_warnings ();
3449 result
= fold_binary (code
, gimple_expr_type (stmt
), op0
, op1
);
3451 STRIP_USELESS_TYPE_CONVERSION (result
);
3453 fold_undefer_overflow_warnings (result
&& valid_gimple_rhs_p (result
),
  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
3460 if (result
&& valid_gimple_rhs_p (result
))
3466 /* Simplify the unary expression RHS, and return the result if
3470 simplify_unary_expression (gassign
*stmt
)
3472 tree result
= NULL_TREE
;
3473 tree orig_op0
, op0
= gimple_assign_rhs1 (stmt
);
3474 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3476 /* We handle some tcc_reference codes here that are all
3477 GIMPLE_ASSIGN_SINGLE codes. */
3478 if (code
== REALPART_EXPR
3479 || code
== IMAGPART_EXPR
3480 || code
== VIEW_CONVERT_EXPR
3481 || code
== BIT_FIELD_REF
)
3482 op0
= TREE_OPERAND (op0
, 0);
3485 op0
= vn_valueize (op0
);
3486 if (TREE_CODE (op0
) == SSA_NAME
)
3488 if (VN_INFO (op0
)->has_constants
)
3489 op0
= vn_get_expr_for (op0
);
3490 else if (CONVERT_EXPR_CODE_P (code
)
3491 || code
== REALPART_EXPR
3492 || code
== IMAGPART_EXPR
3493 || code
== VIEW_CONVERT_EXPR
3494 || code
== BIT_FIELD_REF
)
3496 /* We want to do tree-combining on conversion-like expressions.
3497 Make sure we feed only SSA_NAMEs or constants to fold though. */
3498 tree tem
= vn_get_expr_for (op0
);
3499 if (UNARY_CLASS_P (tem
)
3500 || BINARY_CLASS_P (tem
)
3501 || TREE_CODE (tem
) == VIEW_CONVERT_EXPR
3502 || TREE_CODE (tem
) == SSA_NAME
3503 || TREE_CODE (tem
) == CONSTRUCTOR
3504 || is_gimple_min_invariant (tem
))
3509 /* Avoid folding if nothing changed, but remember the expression. */
3510 if (op0
== orig_op0
)
3513 if (code
== BIT_FIELD_REF
)
3515 tree rhs
= gimple_assign_rhs1 (stmt
);
3516 result
= fold_ternary (BIT_FIELD_REF
, TREE_TYPE (rhs
),
3517 op0
, TREE_OPERAND (rhs
, 1), TREE_OPERAND (rhs
, 2));
3520 result
= fold_unary_ignore_overflow (code
, gimple_expr_type (stmt
), op0
);
3523 STRIP_USELESS_TYPE_CONVERSION (result
);
3524 if (valid_gimple_rhs_p (result
))
3531 /* Try to simplify RHS using equivalences and constant folding. */
3534 try_to_simplify (gassign
*stmt
)
3536 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3539 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3540 in this case, there is no point in doing extra work. */
3541 if (code
== SSA_NAME
)
3544 /* First try constant folding based on our current lattice. */
3545 tem
= gimple_fold_stmt_to_constant_1 (stmt
, vn_valueize
, vn_valueize
);
3547 && (TREE_CODE (tem
) == SSA_NAME
3548 || is_gimple_min_invariant (tem
)))
3551 /* If that didn't work try combining multiple statements. */
3552 switch (TREE_CODE_CLASS (code
))
3555 /* Fallthrough for some unary codes that can operate on registers. */
3556 if (!(code
== REALPART_EXPR
3557 || code
== IMAGPART_EXPR
3558 || code
== VIEW_CONVERT_EXPR
3559 || code
== BIT_FIELD_REF
))
3561 /* We could do a little more with unary ops, if they expand
3562 into binary ops, but it's debatable whether it is worth it. */
3564 return simplify_unary_expression (stmt
);
3566 case tcc_comparison
:
3568 return simplify_binary_expression (stmt
);
3577 /* Visit and value number USE, return true if the value number
3581 visit_use (tree use
)
3583 bool changed
= false;
3584 gimple
*stmt
= SSA_NAME_DEF_STMT (use
);
3586 mark_use_processed (use
);
3588 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3589 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3590 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3592 fprintf (dump_file
, "Value numbering ");
3593 print_generic_expr (dump_file
, use
, 0);
3594 fprintf (dump_file
, " stmt = ");
3595 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3598 /* Handle uninitialized uses. */
3599 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3600 changed
= set_ssa_val_to (use
, use
);
3603 if (gimple_code (stmt
) == GIMPLE_PHI
)
3604 changed
= visit_phi (stmt
);
3605 else if (gimple_has_volatile_ops (stmt
))
3606 changed
= defs_to_varying (stmt
);
3607 else if (is_gimple_assign (stmt
))
3609 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3610 tree lhs
= gimple_assign_lhs (stmt
);
3611 tree rhs1
= gimple_assign_rhs1 (stmt
);
3614 /* Shortcut for copies. Simplifying copies is pointless,
3615 since we copy the expression and value they represent. */
3616 if (code
== SSA_NAME
3617 && TREE_CODE (lhs
) == SSA_NAME
)
3619 changed
= visit_copy (lhs
, rhs1
);
3622 simplified
= try_to_simplify (as_a
<gassign
*> (stmt
));
3625 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3627 fprintf (dump_file
, "RHS ");
3628 print_gimple_expr (dump_file
, stmt
, 0, 0);
3629 fprintf (dump_file
, " simplified to ");
3630 print_generic_expr (dump_file
, simplified
, 0);
3631 if (TREE_CODE (lhs
) == SSA_NAME
)
3632 fprintf (dump_file
, " has constants %d\n",
3633 expr_has_constants (simplified
));
3635 fprintf (dump_file
, "\n");
3638 /* Setting value numbers to constants will occasionally
3639 screw up phi congruence because constants are not
3640 uniquely associated with a single ssa name that can be
3643 && is_gimple_min_invariant (simplified
)
3644 && TREE_CODE (lhs
) == SSA_NAME
)
3646 VN_INFO (lhs
)->expr
= simplified
;
3647 VN_INFO (lhs
)->has_constants
= true;
3648 changed
= set_ssa_val_to (lhs
, simplified
);
3652 && TREE_CODE (simplified
) == SSA_NAME
3653 && TREE_CODE (lhs
) == SSA_NAME
)
3655 changed
= visit_copy (lhs
, simplified
);
3658 else if (simplified
)
3660 if (TREE_CODE (lhs
) == SSA_NAME
)
3662 VN_INFO (lhs
)->has_constants
= expr_has_constants (simplified
);
	      /* We have to unshare the expression or else
		 valueizing may change the IL stream.  */
3665 VN_INFO (lhs
)->expr
= unshare_expr (simplified
);
3668 else if (stmt_has_constants (stmt
)
3669 && TREE_CODE (lhs
) == SSA_NAME
)
3670 VN_INFO (lhs
)->has_constants
= true;
3671 else if (TREE_CODE (lhs
) == SSA_NAME
)
3673 /* We reset expr and constantness here because we may
3674 have been value numbering optimistically, and
3675 iterating. They may become non-constant in this case,
3676 even if they were optimistically constant. */
3678 VN_INFO (lhs
)->has_constants
= false;
3679 VN_INFO (lhs
)->expr
= NULL_TREE
;
3682 if ((TREE_CODE (lhs
) == SSA_NAME
3683 /* We can substitute SSA_NAMEs that are live over
3684 abnormal edges with their constant value. */
3685 && !(gimple_assign_copy_p (stmt
)
3686 && is_gimple_min_invariant (rhs1
))
3688 && is_gimple_min_invariant (simplified
))
3689 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3690 /* Stores or copies from SSA_NAMEs that are live over
3691 abnormal edges are a problem. */
3692 || (code
== SSA_NAME
3693 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
3694 changed
= defs_to_varying (stmt
);
3695 else if (REFERENCE_CLASS_P (lhs
)
3697 changed
= visit_reference_op_store (lhs
, rhs1
, stmt
);
3698 else if (TREE_CODE (lhs
) == SSA_NAME
)
3700 if ((gimple_assign_copy_p (stmt
)
3701 && is_gimple_min_invariant (rhs1
))
3703 && is_gimple_min_invariant (simplified
)))
3705 VN_INFO (lhs
)->has_constants
= true;
3707 changed
= set_ssa_val_to (lhs
, simplified
);
3709 changed
= set_ssa_val_to (lhs
, rhs1
);
3713 /* First try to lookup the simplified expression. */
3716 enum gimple_rhs_class rhs_class
;
3719 rhs_class
= get_gimple_rhs_class (TREE_CODE (simplified
));
3720 if ((rhs_class
== GIMPLE_UNARY_RHS
3721 || rhs_class
== GIMPLE_BINARY_RHS
3722 || rhs_class
== GIMPLE_TERNARY_RHS
)
3723 && valid_gimple_rhs_p (simplified
))
3725 tree result
= vn_nary_op_lookup (simplified
, NULL
);
3728 changed
= set_ssa_val_to (lhs
, result
);
3734 /* Otherwise visit the original statement. */
3735 switch (vn_get_stmt_kind (stmt
))
3738 changed
= visit_nary_op (lhs
, stmt
);
3741 changed
= visit_reference_op_load (lhs
, rhs1
, stmt
);
3744 changed
= defs_to_varying (stmt
);
3750 changed
= defs_to_varying (stmt
);
3752 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
3754 tree lhs
= gimple_call_lhs (stmt
);
3755 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3757 /* Try constant folding based on our current lattice. */
3758 tree simplified
= gimple_fold_stmt_to_constant_1 (stmt
,
3762 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3764 fprintf (dump_file
, "call ");
3765 print_gimple_expr (dump_file
, stmt
, 0, 0);
3766 fprintf (dump_file
, " simplified to ");
3767 print_generic_expr (dump_file
, simplified
, 0);
3768 if (TREE_CODE (lhs
) == SSA_NAME
)
3769 fprintf (dump_file
, " has constants %d\n",
3770 expr_has_constants (simplified
));
3772 fprintf (dump_file
, "\n");
3775 /* Setting value numbers to constants will occasionally
3776 screw up phi congruence because constants are not
3777 uniquely associated with a single ssa name that can be
3780 && is_gimple_min_invariant (simplified
))
3782 VN_INFO (lhs
)->expr
= simplified
;
3783 VN_INFO (lhs
)->has_constants
= true;
3784 changed
= set_ssa_val_to (lhs
, simplified
);
3785 if (gimple_vdef (stmt
))
3786 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3787 SSA_VAL (gimple_vuse (stmt
)));
3791 && TREE_CODE (simplified
) == SSA_NAME
)
3793 changed
= visit_copy (lhs
, simplified
);
3794 if (gimple_vdef (stmt
))
3795 changed
|= set_ssa_val_to (gimple_vdef (stmt
),
3796 SSA_VAL (gimple_vuse (stmt
)));
3801 if (stmt_has_constants (stmt
))
3802 VN_INFO (lhs
)->has_constants
= true;
3805 /* We reset expr and constantness here because we may
3806 have been value numbering optimistically, and
3807 iterating. They may become non-constant in this case,
3808 even if they were optimistically constant. */
3809 VN_INFO (lhs
)->has_constants
= false;
3810 VN_INFO (lhs
)->expr
= NULL_TREE
;
3813 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
3815 changed
= defs_to_varying (stmt
);
3821 if (!gimple_call_internal_p (stmt
)
3822 && (/* Calls to the same function with the same vuse
3823 and the same operands do not necessarily return the same
3824 value, unless they're pure or const. */
3825 gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
)
3826 /* If calls have a vdef, subsequent calls won't have
3827 the same incoming vuse. So, if 2 calls with vdef have the
3828 same vuse, we know they're not subsequent.
3829 We can value number 2 calls to the same function with the
3830 same vuse and the same operands which are not subsequent
3831 the same, because there is no code in the program that can
3832 compare the 2 values... */
3833 || (gimple_vdef (stmt
)
	 /* ... unless the call returns a pointer which does
	    not alias with anything else.  In which case the
	    information that the values are distinct is encoded
3838 && !(gimple_call_return_flags (call_stmt
) & ERF_NOALIAS
)
3839 /* Only perform the following when being called from PRE
3840 which embeds tail merging. */
3841 && default_vn_walk_kind
== VN_WALK
)))
3842 changed
= visit_reference_op_call (lhs
, call_stmt
);
3844 changed
= defs_to_varying (stmt
);
3847 changed
= defs_to_varying (stmt
);
3853 /* Compare two operands by reverse postorder index */
3856 compare_ops (const void *pa
, const void *pb
)
3858 const tree opa
= *((const tree
*)pa
);
3859 const tree opb
= *((const tree
*)pb
);
3860 gimple
*opstmta
= SSA_NAME_DEF_STMT (opa
);
3861 gimple
*opstmtb
= SSA_NAME_DEF_STMT (opb
);
3865 if (gimple_nop_p (opstmta
) && gimple_nop_p (opstmtb
))
3866 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3867 else if (gimple_nop_p (opstmta
))
3869 else if (gimple_nop_p (opstmtb
))
3872 bba
= gimple_bb (opstmta
);
3873 bbb
= gimple_bb (opstmtb
);
3876 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3884 if (gimple_code (opstmta
) == GIMPLE_PHI
3885 && gimple_code (opstmtb
) == GIMPLE_PHI
)
3886 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3887 else if (gimple_code (opstmta
) == GIMPLE_PHI
)
3889 else if (gimple_code (opstmtb
) == GIMPLE_PHI
)
3891 else if (gimple_uid (opstmta
) != gimple_uid (opstmtb
))
3892 return gimple_uid (opstmta
) - gimple_uid (opstmtb
);
3894 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
3896 return rpo_numbers
[bba
->index
] - rpo_numbers
[bbb
->index
];
3899 /* Sort an array containing members of a strongly connected component
3900 SCC so that the members are ordered by RPO number.
3901 This means that when the sort is complete, iterating through the
3902 array will give you the members in RPO order. */
3905 sort_scc (vec
<tree
> scc
)
3907 scc
.qsort (compare_ops
);
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
3922 /* Insert the no longer used phi OPHI to the hash INFO. */
3925 copy_phi (vn_phi_t ophi
, vn_tables_t info
)
3927 vn_phi_t phi
= info
->phis_pool
->allocate ();
3929 memcpy (phi
, ophi
, sizeof (*phi
));
3930 ophi
->phiargs
.create (0);
3931 slot
= info
->phis
->find_slot_with_hash (phi
, phi
->hashcode
, INSERT
);
3932 gcc_assert (!*slot
);
3936 /* Insert the no longer used reference OREF to the hash INFO. */
3939 copy_reference (vn_reference_t oref
, vn_tables_t info
)
3942 vn_reference_s
**slot
;
3943 ref
= info
->references_pool
->allocate ();
3944 memcpy (ref
, oref
, sizeof (*ref
));
3945 oref
->operands
.create (0);
3946 slot
= info
->references
->find_slot_with_hash (ref
, ref
->hashcode
, INSERT
);
3948 free_reference (*slot
);
3952 /* Process a strongly connected component in the SSA graph. */
3955 process_scc (vec
<tree
> scc
)
3959 unsigned int iterations
= 0;
3960 bool changed
= true;
3961 vn_nary_op_iterator_type hin
;
3962 vn_phi_iterator_type hip
;
3963 vn_reference_iterator_type hir
;
3968 /* If the SCC has a single member, just visit it. */
3969 if (scc
.length () == 1)
3972 if (VN_INFO (use
)->use_processed
)
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table, which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI
	 nodes.  */
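      /* E.g. i_2 = PHI <i_2(latch), 0(entry)> (names illustrative) is a
	 single-name SCC whose result feeds back into itself; iterating
	 it instead of visiting it once avoids recording an entry that
	 still contains the VN_TOP placeholder for the backedge.  */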
3979 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
3980 /* Fallthru to iteration. */ ;
3988 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3989 print_scc (dump_file
, scc
);
3991 /* Iterate over the SCC with the optimistic table until it stops
3993 current_info
= optimistic_info
;
3998 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3999 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
4000 /* As we are value-numbering optimistically we have to
4001 clear the expression tables and the simplified expressions
4002 in each iteration until we converge. */
4003 optimistic_info
->nary
->empty ();
4004 optimistic_info
->phis
->empty ();
4005 optimistic_info
->references
->empty ();
4006 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
4007 gcc_obstack_init (&optimistic_info
->nary_obstack
);
4008 optimistic_info
->phis_pool
->release ();
4009 optimistic_info
->references_pool
->release ();
4010 FOR_EACH_VEC_ELT (scc
, i
, var
)
4011 VN_INFO (var
)->expr
= NULL_TREE
;
4012 FOR_EACH_VEC_ELT (scc
, i
, var
)
4013 changed
|= visit_use (var
);
4016 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4017 fprintf (dump_file
, "Processing SCC needed %d iterations\n", iterations
);
4018 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
4020 /* Finally, copy the contents of the no longer used optimistic
4021 table to the valid table. */
4022 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->nary
, nary
, vn_nary_op_t
, hin
)
4023 copy_nary (nary
, valid_info
);
4024 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->phis
, phi
, vn_phi_t
, hip
)
4025 copy_phi (phi
, valid_info
);
4026 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->references
,
4027 ref
, vn_reference_t
, hir
)
4028 copy_reference (ref
, valid_info
);
4030 current_info
= valid_info
;
4034 /* Pop the components of the found SCC for NAME off the SCC stack
4035 and process them. Returns true if all went well, false if
4036 we run into resource limits. */
4039 extract_and_process_scc_for_name (tree name
)
4044 /* Found an SCC, pop the components off the SCC stack and
4048 x
= sccstack
.pop ();
4050 VN_INFO (x
)->on_sccstack
= false;
4052 } while (x
!= name
);
  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
4056 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
4059 fprintf (dump_file
, "WARNING: Giving up with SCCVN due to "
4060 "SCC size %u exceeding %u\n", scc
.length (),
4061 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
));
4066 if (scc
.length () > 1)
4074 /* Depth first search on NAME to discover and process SCC's in the SSA
4076 Execution of this algorithm relies on the fact that the SCC's are
4077 popped off the stack in topological order.
4078 Returns true if successful, false if we stopped processing SCC's due
4079 to resource constraints. */
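/* This is essentially Tarjan's SCC algorithm: each name gets a DFS
   number and a "low" link, and when low == dfsnum for NAME the names
   above it on the stack form one SCC, which is popped and processed
   before any SCC that uses its results (topological order).  The
   explicit namevec/itervec worklist below replaces the usual
   recursion on the SSA use graph.  */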
4084 vec
<ssa_op_iter
> itervec
= vNULL
;
4085 vec
<tree
> namevec
= vNULL
;
4086 use_operand_p usep
= NULL
;
4093 VN_INFO (name
)->dfsnum
= next_dfs_num
++;
4094 VN_INFO (name
)->visited
= true;
4095 VN_INFO (name
)->low
= VN_INFO (name
)->dfsnum
;
4097 sccstack
.safe_push (name
);
4098 VN_INFO (name
)->on_sccstack
= true;
4099 defstmt
= SSA_NAME_DEF_STMT (name
);
4101 /* Recursively DFS on our operands, looking for SCC's. */
4102 if (!gimple_nop_p (defstmt
))
4104 /* Push a new iterator. */
4105 if (gphi
*phi
= dyn_cast
<gphi
*> (defstmt
))
4106 usep
= op_iter_init_phiuse (&iter
, phi
, SSA_OP_ALL_USES
);
4108 usep
= op_iter_init_use (&iter
, defstmt
, SSA_OP_ALL_USES
);
4111 clear_and_done_ssa_iter (&iter
);
4115 /* If we are done processing uses of a name, go up the stack
4116 of iterators and process SCCs as we found them. */
4117 if (op_iter_done (&iter
))
4119 /* See if we found an SCC. */
4120 if (VN_INFO (name
)->low
== VN_INFO (name
)->dfsnum
)
4121 if (!extract_and_process_scc_for_name (name
))
4128 /* Check if we are done. */
4129 if (namevec
.is_empty ())
4136 /* Restore the last use walker and continue walking there. */
4138 name
= namevec
.pop ();
4139 memcpy (&iter
, &itervec
.last (),
4140 sizeof (ssa_op_iter
));
4142 goto continue_walking
;
4145 use
= USE_FROM_PTR (usep
);
4147 /* Since we handle phi nodes, we will sometimes get
4148 invariants in the use expression. */
4149 if (TREE_CODE (use
) == SSA_NAME
)
4151 if (! (VN_INFO (use
)->visited
))
4153 /* Recurse by pushing the current use walking state on
4154 the stack and starting over. */
4155 itervec
.safe_push (iter
);
4156 namevec
.safe_push (name
);
4161 VN_INFO (name
)->low
= MIN (VN_INFO (name
)->low
,
4162 VN_INFO (use
)->low
);
4164 if (VN_INFO (use
)->dfsnum
< VN_INFO (name
)->dfsnum
4165 && VN_INFO (use
)->on_sccstack
)
4167 VN_INFO (name
)->low
= MIN (VN_INFO (use
)->dfsnum
,
4168 VN_INFO (name
)->low
);
4172 usep
= op_iter_next_use (&iter
);
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}
4191 /* Free a value number table. */
4194 free_vn_table (vn_tables_t table
)
4200 delete table
->references
;
4201 table
->references
= NULL
;
4202 obstack_free (&table
->nary_obstack
, NULL
);
4203 delete table
->phis_pool
;
4204 delete table
->references_pool
;
4212 int *rpo_numbers_temp
;
4214 calculate_dominance_info (CDI_DOMINATORS
);
4215 mark_dfs_back_edges ();
4217 sccstack
.create (0);
4218 constant_to_value_id
= new hash_table
<vn_constant_hasher
> (23);
4220 constant_value_ids
= BITMAP_ALLOC (NULL
);
4225 vn_ssa_aux_table
.create (num_ssa_names
+ 1);
4226 /* VEC_alloc doesn't actually grow it to the right size, it just
4227 preallocates the space to do so. */
4228 vn_ssa_aux_table
.safe_grow_cleared (num_ssa_names
+ 1);
4229 gcc_obstack_init (&vn_ssa_aux_obstack
);
4231 shared_lookup_phiargs
.create (0);
4232 shared_lookup_references
.create (0);
4233 rpo_numbers
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
4235 XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
4236 pre_and_rev_post_order_compute (NULL
, rpo_numbers_temp
, false);
  /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to invert this array.  */
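  /* E.g. if rpo_numbers_temp were { 2, 5, 3 } (block 2 first, then 5,
     then 3), the loop below produces rpo_numbers[2] = 0,
     rpo_numbers[5] = 1 and rpo_numbers[3] = 2.  */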
4241 for (j
= 0; j
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; j
++)
4242 rpo_numbers
[rpo_numbers_temp
[j
]] = j
;
4244 XDELETE (rpo_numbers_temp
);
4246 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
4248 renumber_gimple_stmt_uids ();
4250 /* Create the valid and optimistic value numbering tables. */
4251 valid_info
= XCNEW (struct vn_tables_s
);
4252 allocate_vn_table (valid_info
);
4253 optimistic_info
= XCNEW (struct vn_tables_s
);
4254 allocate_vn_table (optimistic_info
);
4255 current_info
= valid_info
;
4257 /* Create the VN_INFO structures, and initialize value numbers to
4258 TOP or VARYING for parameters. */
4259 for (i
= 1; i
< num_ssa_names
; i
++)
4261 tree name
= ssa_name (i
);
4265 VN_INFO_GET (name
)->valnum
= VN_TOP
;
4266 VN_INFO (name
)->expr
= NULL_TREE
;
4267 VN_INFO (name
)->value_id
= 0;
4269 if (!SSA_NAME_IS_DEFAULT_DEF (name
))
4272 switch (TREE_CODE (SSA_NAME_VAR (name
)))
4275 /* Undefined vars keep TOP. */
4279 /* Parameters are VARYING but we can record a condition
4280 if we know it is a non-NULL pointer. */
4281 VN_INFO (name
)->visited
= true;
4282 VN_INFO (name
)->valnum
= name
;
4283 if (POINTER_TYPE_P (TREE_TYPE (name
))
4284 && nonnull_arg_p (SSA_NAME_VAR (name
)))
4288 ops
[1] = build_int_cst (TREE_TYPE (name
), 0);
4289 vn_nary_op_insert_pieces (2, NE_EXPR
, boolean_type_node
, ops
,
4290 boolean_true_node
, 0);
4291 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4293 fprintf (dump_file
, "Recording ");
4294 print_generic_expr (dump_file
, name
, TDF_SLIM
);
4295 fprintf (dump_file
, " != 0\n");
4301 /* If the result is passed by invisible reference the default
4302 def is initialized, otherwise it's uninitialized. */
4303 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name
)))
4305 VN_INFO (name
)->visited
= true;
4306 VN_INFO (name
)->valnum
= name
;
4321 delete constant_to_value_id
;
4322 constant_to_value_id
= NULL
;
4323 BITMAP_FREE (constant_value_ids
);
4324 shared_lookup_phiargs
.release ();
4325 shared_lookup_references
.release ();
4326 XDELETEVEC (rpo_numbers
);
4328 for (i
= 0; i
< num_ssa_names
; i
++)
4330 tree name
= ssa_name (i
);
4332 && SSA_NAME_VERSION (name
) < vn_ssa_aux_table
.length ()
4333 && vn_ssa_aux_table
[SSA_NAME_VERSION (name
)]
4334 && VN_INFO (name
)->needs_insertion
)
4335 release_ssa_name (name
);
4337 obstack_free (&vn_ssa_aux_obstack
, NULL
);
4338 vn_ssa_aux_table
.release ();
4340 sccstack
.release ();
4341 free_vn_table (valid_info
);
4342 XDELETE (valid_info
);
4343 free_vn_table (optimistic_info
);
4344 XDELETE (optimistic_info
);
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
4360 /* Set the value ids in the valid hash tables. */
4363 set_hashtable_value_ids (void)
4365 vn_nary_op_iterator_type hin
;
4366 vn_phi_iterator_type hip
;
4367 vn_reference_iterator_type hir
;
4372 /* Now set the value ids of the things we had put in the hash
4375 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->nary
, vno
, vn_nary_op_t
, hin
)
4376 set_value_id_for_result (vno
->result
, &vno
->value_id
);
4378 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->phis
, vp
, vn_phi_t
, hip
)
4379 set_value_id_for_result (vp
->result
, &vp
->value_id
);
4381 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info
->references
, vr
, vn_reference_t
,
4383 set_value_id_for_result (vr
->result
, &vr
->value_id
);
4386 class sccvn_dom_walker
: public dom_walker
4390 : dom_walker (CDI_DOMINATORS
), fail (false), cond_stack (vNULL
) {}
4392 virtual void before_dom_children (basic_block
);
4393 virtual void after_dom_children (basic_block
);
4395 void record_cond (basic_block
,
4396 enum tree_code code
, tree lhs
, tree rhs
, bool value
);
4397 void record_conds (basic_block
,
4398 enum tree_code code
, tree lhs
, tree rhs
, bool value
);
4401 vec
<std::pair
<basic_block
, std::pair
<vn_nary_op_t
, vn_nary_op_t
> > >
/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
                               enum tree_code code, tree lhs, tree rhs,
                               bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
                                value
                                ? boolean_true_node
                                : boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
               value ? "true" : "false",
               old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}
/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
                                enum tree_code code, tree lhs, tree rhs,
                                bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}
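
/* As an example of the above, entering the true successor of
   "if (a < b)" records a < b == true plus the derived a <= b == true,
   a != b == true and a == b == false; the caller additionally records
   the inverted comparison a >= b == false via a second record_conds call.  */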
/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
         && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
        vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}
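
/* The dominator walk pushes conditions in DFS order, so all cond_stack
   entries belonging to BB sit on top once its subtree has been left and a
   simple LIFO unwind restores the previous hash table state.  */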
/* Value number all statements in BB.  */

void
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);
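
  /* Predecessor edges whose source is dominated by BB are backedges; their
     executability has not been decided yet at this point of the walk,
     which is why they are ignored in the reachability test above.  */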
  /* If the block is not reachable all outgoing edges are not
     executable.  Neither are incoming edges with src dominated by us.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Marking all outgoing edges of unreachable "
                 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags &= ~EDGE_EXECUTABLE;

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Marking backedge from BB %d into "
                       "unreachable BB %d as not executable\n",
                       e->src->index, bb->index);
            e->flags &= ~EDGE_EXECUTABLE;
          }
      return;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);
  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  if (single_pred_p (bb))
    {
      edge e = single_pred_edge (bb);
      /* Check if there are multiple executable successor edges in
         the source block.  Otherwise there is no additional info
         to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, e->src->succs)
        if (e2 != e
            && e2->flags & EDGE_EXECUTABLE)
          break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
        {
          gimple *stmt = last_stmt (e->src);
          if (stmt
              && gimple_code (stmt) == GIMPLE_COND)
            {
              enum tree_code code = gimple_cond_code (stmt);
              tree lhs = gimple_cond_lhs (stmt);
              tree rhs = gimple_cond_rhs (stmt);
              record_conds (bb, code, lhs, rhs,
                            (e->flags & EDGE_TRUE_VALUE) != 0);
              code = invert_tree_comparison (code, HONOR_NANS (lhs));
              if (code != ERROR_MARK)
                record_conds (bb, code, lhs, rhs,
                              (e->flags & EDGE_TRUE_VALUE) == 0);
            }
        }
    }
  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
          && !DFS (res))
        {
          fail = true;
          return;
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
        if (!VN_INFO (op)->visited
            && !DFS (op))
          {
            fail = true;
            return;
          }
    }
  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        /* Work hard in computing the condition and take into account
           the valueization of the defining stmt.  */
        if (TREE_CODE (lhs) == SSA_NAME)
          lhs = vn_get_expr_for (lhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          rhs = vn_get_expr_for (rhs);
        val = fold_binary (gimple_cond_code (stmt),
                           boolean_type_node, lhs, rhs);
        /* If that didn't simplify to a constant see if we have recorded
           temporary expressions from taken edges.  */
        if (!val || TREE_CODE (val) != INTEGER_CST)
          {
            tree ops[2];
            ops[0] = gimple_cond_lhs (stmt);
            ops[1] = gimple_cond_rhs (stmt);
            val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
                                            boolean_type_node, ops, NULL);
          }
        break;
      }
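
      /* Note the second lookup above can hit the temporary "LHS CODE RHS"
         entries that record_conds inserted for dominating conditions, so a
         condition repeated in a dominated block can still evaluate to a
         constant even when fold_binary could not simplify it.  */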
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
             "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }
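
  /* The walk above has both value-numbered all reachable defs and pruned
     EDGE_EXECUTABLE from edges proven not executable; dominated blocks
     already saw the pruned flags when testing their own reachability.  */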
  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (!info->visited)
        info->valnum = name;
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
  /* Propagate value ids along the valnum chains.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}