/* SCC value numbering for trees
   Copyright (C) 2006-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it does not change).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
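
/* A minimal, disabled sketch of alternative 1 above: hash-based value
   numbering iterated over an RPO walk of the blocks until the SSA
   name -> value number mapping reaches a fixed point.  The helpers
   clear_expression_hashtable, first_stmt_in_rpo, next_stmt_in_rpo,
   hash_stmt, get_value and set_value are hypothetical placeholders,
   not functions defined in this file; this only illustrates the idea,
   not the SCC-based implementation used below.  */
#if 0
static void
rpo_value_numbering_sketch (void)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Drop all hashtable entries between iterations, but keep the
	 SSA name -> value number mapping.  */
      clear_expression_hashtable ();
      for (gimple *stmt = first_stmt_in_rpo ();
	   stmt != NULL;
	   stmt = next_stmt_in_rpo (stmt))
	{
	  unsigned int val = hash_stmt (stmt);
	  if (val != get_value (stmt))
	    {
	      set_value (stmt, val);
	      changed = true;
	    }
	}
    }
}
#endif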
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;


/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
539 /* Compute a hash for the reference operation VR1 and return it. */
542 vn_reference_compute_hash (const vn_reference_t vr1
)
544 inchash::hash hstate
;
547 vn_reference_op_t vro
;
548 HOST_WIDE_INT off
= -1;
551 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
553 if (vro
->opcode
== MEM_REF
)
555 else if (vro
->opcode
!= ADDR_EXPR
)
567 hstate
.add_int (off
);
570 && vro
->opcode
== ADDR_EXPR
)
574 tree op
= TREE_OPERAND (vro
->op0
, 0);
575 hstate
.add_int (TREE_CODE (op
));
576 inchash::add_expr (op
, hstate
);
580 vn_reference_op_compute_hash (vro
, hstate
);
583 result
= hstate
.end ();
584 /* ??? We would ICE later if we hash instead of adding that in. */
586 result
+= SSA_NAME_VERSION (vr1
->vuse
);
591 /* Return true if reference operations VR1 and VR2 are equivalent. This
592 means they have the same set of operands and vuses. */
595 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
599 /* Early out if this is not a hash collision. */
600 if (vr1
->hashcode
!= vr2
->hashcode
)
603 /* The VOP needs to be the same. */
604 if (vr1
->vuse
!= vr2
->vuse
)
607 /* If the operands are the same we are done. */
608 if (vr1
->operands
== vr2
->operands
)
611 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
614 if (INTEGRAL_TYPE_P (vr1
->type
)
615 && INTEGRAL_TYPE_P (vr2
->type
))
617 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
620 else if (INTEGRAL_TYPE_P (vr1
->type
)
621 && (TYPE_PRECISION (vr1
->type
)
622 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
624 else if (INTEGRAL_TYPE_P (vr2
->type
)
625 && (TYPE_PRECISION (vr2
->type
)
626 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
633 HOST_WIDE_INT off1
= 0, off2
= 0;
634 vn_reference_op_t vro1
, vro2
;
635 vn_reference_op_s tem1
, tem2
;
636 bool deref1
= false, deref2
= false;
637 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
639 if (vro1
->opcode
== MEM_REF
)
641 /* Do not look through a storage order barrier. */
642 else if (vro1
->opcode
== VIEW_CONVERT_EXPR
&& vro1
->reverse
)
648 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
650 if (vro2
->opcode
== MEM_REF
)
652 /* Do not look through a storage order barrier. */
653 else if (vro2
->opcode
== VIEW_CONVERT_EXPR
&& vro2
->reverse
)
661 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
663 memset (&tem1
, 0, sizeof (tem1
));
664 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
665 tem1
.type
= TREE_TYPE (tem1
.op0
);
666 tem1
.opcode
= TREE_CODE (tem1
.op0
);
670 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
672 memset (&tem2
, 0, sizeof (tem2
));
673 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
674 tem2
.type
= TREE_TYPE (tem2
.op0
);
675 tem2
.opcode
= TREE_CODE (tem2
.op0
);
679 if (deref1
!= deref2
)
681 if (!vn_reference_op_eq (vro1
, vro2
))
686 while (vr1
->operands
.length () != i
687 || vr2
->operands
.length () != j
);
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693 vn_reference_op_s's. */
696 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
698 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
700 vn_reference_op_s temp
;
704 memset (&temp
, 0, sizeof (temp
));
705 temp
.type
= TREE_TYPE (ref
);
706 temp
.opcode
= TREE_CODE (ref
);
707 temp
.op0
= TMR_INDEX (ref
);
708 temp
.op1
= TMR_STEP (ref
);
709 temp
.op2
= TMR_OFFSET (ref
);
711 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
712 temp
.base
= MR_DEPENDENCE_BASE (ref
);
713 result
->quick_push (temp
);
715 memset (&temp
, 0, sizeof (temp
));
716 temp
.type
= NULL_TREE
;
717 temp
.opcode
= ERROR_MARK
;
718 temp
.op0
= TMR_INDEX2 (ref
);
720 result
->quick_push (temp
);
722 memset (&temp
, 0, sizeof (temp
));
723 temp
.type
= NULL_TREE
;
724 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
725 temp
.op0
= TMR_BASE (ref
);
727 result
->quick_push (temp
);
731 /* For non-calls, store the information that makes up the address. */
735 vn_reference_op_s temp
;
737 memset (&temp
, 0, sizeof (temp
));
738 temp
.type
= TREE_TYPE (ref
);
739 temp
.opcode
= TREE_CODE (ref
);
745 temp
.op0
= TREE_OPERAND (ref
, 1);
748 temp
.op0
= TREE_OPERAND (ref
, 1);
752 /* The base address gets its own vn_reference_op_s structure. */
753 temp
.op0
= TREE_OPERAND (ref
, 1);
755 offset_int off
= mem_ref_offset (ref
);
756 if (wi::fits_shwi_p (off
))
757 temp
.off
= off
.to_shwi ();
759 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
760 temp
.base
= MR_DEPENDENCE_BASE (ref
);
761 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
764 /* Record bits, position and storage order. */
765 temp
.op0
= TREE_OPERAND (ref
, 1);
766 temp
.op1
= TREE_OPERAND (ref
, 2);
767 if (tree_fits_shwi_p (TREE_OPERAND (ref
, 2)))
769 HOST_WIDE_INT off
= tree_to_shwi (TREE_OPERAND (ref
, 2));
770 if (off
% BITS_PER_UNIT
== 0)
771 temp
.off
= off
/ BITS_PER_UNIT
;
773 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
776 /* The field decl is enough to unambiguously specify the field,
777 a matching type is not necessary and a mismatching type
778 is always a spurious difference. */
779 temp
.type
= NULL_TREE
;
780 temp
.op0
= TREE_OPERAND (ref
, 1);
781 temp
.op1
= TREE_OPERAND (ref
, 2);
783 tree this_offset
= component_ref_field_offset (ref
);
785 && TREE_CODE (this_offset
) == INTEGER_CST
)
787 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
788 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
791 = (wi::to_offset (this_offset
)
792 + (wi::to_offset (bit_offset
) >> LOG2_BITS_PER_UNIT
));
793 if (wi::fits_shwi_p (off
)
/* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
799 && (TREE_CODE (orig
) != ADDR_EXPR
801 || cfun
->after_inlining
))
802 temp
.off
= off
.to_shwi ();
807 case ARRAY_RANGE_REF
:
810 tree eltype
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref
, 0)));
811 /* Record index as operand. */
812 temp
.op0
= TREE_OPERAND (ref
, 1);
813 /* Always record lower bounds and element size. */
814 temp
.op1
= array_ref_low_bound (ref
);
815 /* But record element size in units of the type alignment. */
816 temp
.op2
= TREE_OPERAND (ref
, 3);
817 temp
.align
= eltype
->type_common
.align
;
819 temp
.op2
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE_UNIT (eltype
),
820 size_int (TYPE_ALIGN_UNIT (eltype
)));
821 if (TREE_CODE (temp
.op0
) == INTEGER_CST
822 && TREE_CODE (temp
.op1
) == INTEGER_CST
823 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
825 offset_int off
= ((wi::to_offset (temp
.op0
)
826 - wi::to_offset (temp
.op1
))
827 * wi::to_offset (temp
.op2
)
828 * vn_ref_op_align_unit (&temp
));
829 if (wi::fits_shwi_p (off
))
830 temp
.off
= off
.to_shwi();
835 if (DECL_HARD_REGISTER (ref
))
844 /* Canonicalize decls to MEM[&decl] which is what we end up with
845 when valueizing MEM[ptr] with ptr = &decl. */
846 temp
.opcode
= MEM_REF
;
847 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
849 result
->safe_push (temp
);
850 temp
.opcode
= ADDR_EXPR
;
851 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
852 temp
.type
= TREE_TYPE (temp
.op0
);
866 if (is_gimple_min_invariant (ref
))
872 /* These are only interesting for their operands, their
873 existence, and their type. They will never be the last
874 ref in the chain of references (IE they require an
875 operand), so we don't have to put anything
876 for op* as it will be handled by the iteration */
880 case VIEW_CONVERT_EXPR
:
882 temp
.reverse
= storage_order_barrier_p (ref
);
885 /* This is only interesting for its constant offset. */
886 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
891 result
->safe_push (temp
);
893 if (REFERENCE_CLASS_P (ref
)
894 || TREE_CODE (ref
) == MODIFY_EXPR
895 || TREE_CODE (ref
) == WITH_SIZE_EXPR
896 || (TREE_CODE (ref
) == ADDR_EXPR
897 && !is_gimple_min_invariant (ref
)))
898 ref
= TREE_OPERAND (ref
, 0);
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */
909 ao_ref_init_from_vn_reference (ao_ref
*ref
,
910 alias_set_type set
, tree type
,
911 vec
<vn_reference_op_s
> ops
)
913 vn_reference_op_t op
;
915 tree base
= NULL_TREE
;
917 offset_int offset
= 0;
919 offset_int size
= -1;
920 tree size_tree
= NULL_TREE
;
921 alias_set_type base_alias_set
= -1;
923 /* First get the final access size from just the outermost expression. */
925 if (op
->opcode
== COMPONENT_REF
)
926 size_tree
= DECL_SIZE (op
->op0
);
927 else if (op
->opcode
== BIT_FIELD_REF
)
931 machine_mode mode
= TYPE_MODE (type
);
933 size_tree
= TYPE_SIZE (type
);
935 size
= int (GET_MODE_BITSIZE (mode
));
937 if (size_tree
!= NULL_TREE
938 && TREE_CODE (size_tree
) == INTEGER_CST
)
939 size
= wi::to_offset (size_tree
);
941 /* Initially, maxsize is the same as the accessed element size.
942 In the following it will only grow (or become -1). */
945 /* Compute cumulative bit-offset for nested component-refs and array-refs,
946 and find the ultimate containing object. */
947 FOR_EACH_VEC_ELT (ops
, i
, op
)
951 /* These may be in the reference ops, but we cannot do anything
952 sensible with them here. */
954 /* Apart from ADDR_EXPR arguments to MEM_REF. */
955 if (base
!= NULL_TREE
956 && TREE_CODE (base
) == MEM_REF
958 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
960 vn_reference_op_t pop
= &ops
[i
-1];
961 base
= TREE_OPERAND (op
->op0
, 0);
968 offset
+= pop
->off
* BITS_PER_UNIT
;
976 /* Record the base objects. */
978 base_alias_set
= get_deref_alias_set (op
->op0
);
979 *op0_p
= build2 (MEM_REF
, op
->type
,
981 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
982 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
983 op0_p
= &TREE_OPERAND (*op0_p
, 0);
994 /* And now the usual component-reference style ops. */
996 offset
+= wi::to_offset (op
->op1
);
1001 tree field
= op
->op0
;
1002 /* We do not have a complete COMPONENT_REF tree here so we
1003 cannot use component_ref_field_offset. Do the interesting
1005 tree this_offset
= DECL_FIELD_OFFSET (field
);
1007 if (op
->op1
|| TREE_CODE (this_offset
) != INTEGER_CST
)
1011 offset_int woffset
= (wi::to_offset (this_offset
)
1012 << LOG2_BITS_PER_UNIT
);
1013 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1019 case ARRAY_RANGE_REF
:
1021 /* We recorded the lower bound and the element size. */
1022 if (TREE_CODE (op
->op0
) != INTEGER_CST
1023 || TREE_CODE (op
->op1
) != INTEGER_CST
1024 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1029 = wi::sext (wi::to_offset (op
->op0
) - wi::to_offset (op
->op1
),
1030 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1031 woffset
*= wi::to_offset (op
->op2
) * vn_ref_op_align_unit (op
);
1032 woffset
<<= LOG2_BITS_PER_UNIT
;
1044 case VIEW_CONVERT_EXPR
:
1061 if (base
== NULL_TREE
)
1064 ref
->ref
= NULL_TREE
;
1066 ref
->ref_alias_set
= set
;
1067 if (base_alias_set
!= -1)
1068 ref
->base_alias_set
= base_alias_set
;
1070 ref
->base_alias_set
= get_alias_set (base
);
1071 /* We discount volatiles from value-numbering elsewhere. */
1072 ref
->volatile_p
= false;
1074 if (!wi::fits_shwi_p (size
) || wi::neg_p (size
))
1082 ref
->size
= size
.to_shwi ();
1084 if (!wi::fits_shwi_p (offset
))
1091 ref
->offset
= offset
.to_shwi ();
1093 if (!wi::fits_shwi_p (max_size
) || wi::neg_p (max_size
))
1096 ref
->max_size
= max_size
.to_shwi ();
1101 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1102 vn_reference_op_s's. */
1105 copy_reference_ops_from_call (gcall
*call
,
1106 vec
<vn_reference_op_s
> *result
)
1108 vn_reference_op_s temp
;
1110 tree lhs
= gimple_call_lhs (call
);
1113 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1114 different. By adding the lhs here in the vector, we ensure that the
1115 hashcode is different, guaranteeing a different value number. */
1116 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1118 memset (&temp
, 0, sizeof (temp
));
1119 temp
.opcode
= MODIFY_EXPR
;
1120 temp
.type
= TREE_TYPE (lhs
);
1123 result
->safe_push (temp
);
1126 /* Copy the type, opcode, function, static chain and EH region, if any. */
1127 memset (&temp
, 0, sizeof (temp
));
1128 temp
.type
= gimple_call_return_type (call
);
1129 temp
.opcode
= CALL_EXPR
;
1130 temp
.op0
= gimple_call_fn (call
);
1131 temp
.op1
= gimple_call_chain (call
);
1132 if (stmt_could_throw_p (call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1133 temp
.op2
= size_int (lr
);
1135 if (gimple_call_with_bounds_p (call
))
1136 temp
.with_bounds
= 1;
1137 result
->safe_push (temp
);
1139 /* Copy the call arguments. As they can be references as well,
1140 just chain them together. */
1141 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1143 tree callarg
= gimple_call_arg (call
, i
);
1144 copy_reference_ops_from_ref (callarg
, result
);
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1149 *I_P to point to the last element of the replacement. */
1151 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1154 unsigned int i
= *i_p
;
1155 vn_reference_op_t op
= &(*ops
)[i
];
1156 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1158 HOST_WIDE_INT addr_offset
= 0;
1160 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1161 from .foo.bar to the preceding MEM_REF offset and replace the
1162 address with &OBJ. */
1163 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
1165 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1166 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1168 offset_int off
= offset_int::from (mem_op
->op0
, SIGNED
);
1170 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1171 op
->op0
= build_fold_addr_expr (addr_base
);
1172 if (tree_fits_shwi_p (mem_op
->op0
))
1173 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1181 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1182 *I_P to point to the last element of the replacement. */
1184 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1187 unsigned int i
= *i_p
;
1188 vn_reference_op_t op
= &(*ops
)[i
];
1189 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1191 enum tree_code code
;
1194 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1195 if (!is_gimple_assign (def_stmt
))
1198 code
= gimple_assign_rhs_code (def_stmt
);
1199 if (code
!= ADDR_EXPR
1200 && code
!= POINTER_PLUS_EXPR
)
1203 off
= offset_int::from (mem_op
->op0
, SIGNED
);
1205 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1206 from .foo.bar to the preceding MEM_REF offset and replace the
1207 address with &OBJ. */
1208 if (code
== ADDR_EXPR
)
1210 tree addr
, addr_base
;
1211 HOST_WIDE_INT addr_offset
;
1213 addr
= gimple_assign_rhs1 (def_stmt
);
1214 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1216 /* If that didn't work because the address isn't invariant propagate
1217 the reference tree from the address operation in case the current
1218 dereference isn't offsetted. */
1220 && *i_p
== ops
->length () - 1
1222 /* This makes us disable this transform for PRE where the
1223 reference ops might be also used for code insertion which
1225 && default_vn_walk_kind
== VN_WALKREWRITE
)
1227 auto_vec
<vn_reference_op_s
, 32> tem
;
1228 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1229 /* Make sure to preserve TBAA info. The only objects not
1230 wrapped in MEM_REFs that can have their address taken are
1232 if (tem
.length () >= 2
1233 && tem
[tem
.length () - 2].opcode
== MEM_REF
)
1235 vn_reference_op_t new_mem_op
= &tem
[tem
.length () - 2];
1236 new_mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
),
1240 gcc_assert (tem
.last ().opcode
== STRING_CST
);
1243 ops
->safe_splice (tem
);
1248 || TREE_CODE (addr_base
) != MEM_REF
)
1252 off
+= mem_ref_offset (addr_base
);
1253 op
->op0
= TREE_OPERAND (addr_base
, 0);
1258 ptr
= gimple_assign_rhs1 (def_stmt
);
1259 ptroff
= gimple_assign_rhs2 (def_stmt
);
1260 if (TREE_CODE (ptr
) != SSA_NAME
1261 || TREE_CODE (ptroff
) != INTEGER_CST
)
1264 off
+= wi::to_offset (ptroff
);
1268 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1269 if (tree_fits_shwi_p (mem_op
->op0
))
1270 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1273 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1274 op
->op0
= SSA_VAL (op
->op0
);
1275 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1276 op
->opcode
= TREE_CODE (op
->op0
);
1279 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1280 vn_reference_maybe_forwprop_address (ops
, i_p
);
1281 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1282 vn_reference_fold_indirect (ops
, i_p
);
1286 /* Optimize the reference REF to a constant if possible or return
1287 NULL_TREE if not. */
1290 fully_constant_vn_reference_p (vn_reference_t ref
)
1292 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1293 vn_reference_op_t op
;
1295 /* Try to simplify the translated expression if it is
1296 a call to a builtin function with at most two arguments. */
1298 if (op
->opcode
== CALL_EXPR
1299 && TREE_CODE (op
->op0
) == ADDR_EXPR
1300 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1301 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1302 && operands
.length () >= 2
1303 && operands
.length () <= 3)
1305 vn_reference_op_t arg0
, arg1
= NULL
;
1306 bool anyconst
= false;
1307 arg0
= &operands
[1];
1308 if (operands
.length () > 2)
1309 arg1
= &operands
[2];
1310 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1311 || (arg0
->opcode
== ADDR_EXPR
1312 && is_gimple_min_invariant (arg0
->op0
)))
1315 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1316 || (arg1
->opcode
== ADDR_EXPR
1317 && is_gimple_min_invariant (arg1
->op0
))))
1321 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1324 arg1
? arg1
->op0
: NULL
);
1326 && TREE_CODE (folded
) == NOP_EXPR
)
1327 folded
= TREE_OPERAND (folded
, 0);
1329 && is_gimple_min_invariant (folded
))
1334 /* Simplify reads from constants or constant initializers. */
1335 else if (BITS_PER_UNIT
== 8
1336 && is_gimple_reg_type (ref
->type
)
1337 && (!INTEGRAL_TYPE_P (ref
->type
)
1338 || TYPE_PRECISION (ref
->type
) % BITS_PER_UNIT
== 0))
1340 HOST_WIDE_INT off
= 0;
1342 if (INTEGRAL_TYPE_P (ref
->type
))
1343 size
= TYPE_PRECISION (ref
->type
);
1345 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1346 if (size
% BITS_PER_UNIT
!= 0
1347 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1349 size
/= BITS_PER_UNIT
;
1351 for (i
= 0; i
< operands
.length (); ++i
)
1353 if (TREE_CODE_CLASS (operands
[i
].opcode
) == tcc_constant
)
1358 if (operands
[i
].off
== -1)
1360 off
+= operands
[i
].off
;
1361 if (operands
[i
].opcode
== MEM_REF
)
1367 vn_reference_op_t base
= &operands
[--i
];
1368 tree ctor
= error_mark_node
;
1369 tree decl
= NULL_TREE
;
1370 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1372 else if (base
->opcode
== MEM_REF
1373 && base
[1].opcode
== ADDR_EXPR
1374 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1375 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
))
1377 decl
= TREE_OPERAND (base
[1].op0
, 0);
1378 ctor
= ctor_for_folding (decl
);
1380 if (ctor
== NULL_TREE
)
1381 return build_zero_cst (ref
->type
);
1382 else if (ctor
!= error_mark_node
)
1386 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1387 off
* BITS_PER_UNIT
,
1388 size
* BITS_PER_UNIT
, decl
);
1391 STRIP_USELESS_TYPE_CONVERSION (res
);
1392 if (is_gimple_min_invariant (res
))
1398 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1399 int len
= native_encode_expr (ctor
, buf
, size
, off
);
1401 return native_interpret_expr (ref
->type
, buf
, len
);
1409 /* Return true if OPS contain a storage order barrier. */
1412 contains_storage_order_barrier_p (vec
<vn_reference_op_s
> ops
)
1414 vn_reference_op_t op
;
1417 FOR_EACH_VEC_ELT (ops
, i
, op
)
1418 if (op
->opcode
== VIEW_CONVERT_EXPR
&& op
->reverse
)
1424 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1425 structures into their value numbers. This is done in-place, and
1426 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1427 whether any operands were valueized. */
1429 static vec
<vn_reference_op_s
>
1430 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
)
1432 vn_reference_op_t vro
;
1435 *valueized_anything
= false;
1437 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1439 if (vro
->opcode
== SSA_NAME
1440 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1442 tree tem
= SSA_VAL (vro
->op0
);
1443 if (tem
!= vro
->op0
)
1445 *valueized_anything
= true;
1448 /* If it transforms from an SSA_NAME to a constant, update
1450 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1451 vro
->opcode
= TREE_CODE (vro
->op0
);
1453 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1455 tree tem
= SSA_VAL (vro
->op1
);
1456 if (tem
!= vro
->op1
)
1458 *valueized_anything
= true;
1462 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1464 tree tem
= SSA_VAL (vro
->op2
);
1465 if (tem
!= vro
->op2
)
1467 *valueized_anything
= true;
1471 /* If it transforms from an SSA_NAME to an address, fold with
1472 a preceding indirect reference. */
1475 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1476 && orig
[i
- 1].opcode
== MEM_REF
)
1478 if (vn_reference_fold_indirect (&orig
, &i
))
1479 *valueized_anything
= true;
1482 && vro
->opcode
== SSA_NAME
1483 && orig
[i
- 1].opcode
== MEM_REF
)
1485 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1486 *valueized_anything
= true;
1488 /* If it transforms a non-constant ARRAY_REF into a constant
1489 one, adjust the constant offset. */
1490 else if (vro
->opcode
== ARRAY_REF
1492 && TREE_CODE (vro
->op0
) == INTEGER_CST
1493 && TREE_CODE (vro
->op1
) == INTEGER_CST
1494 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1496 offset_int off
= ((wi::to_offset (vro
->op0
)
1497 - wi::to_offset (vro
->op1
))
1498 * wi::to_offset (vro
->op2
)
1499 * vn_ref_op_align_unit (vro
));
1500 if (wi::fits_shwi_p (off
))
1501 vro
->off
= off
.to_shwi ();
1508 static vec
<vn_reference_op_s
>
1509 valueize_refs (vec
<vn_reference_op_s
> orig
)
1512 return valueize_refs_1 (orig
, &tem
);
1515 static vec
<vn_reference_op_s
> shared_lookup_references
;
1517 /* Create a vector of vn_reference_op_s structures from REF, a
1518 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1519 this function. *VALUEIZED_ANYTHING will specify whether any
1520 operands were valueized. */
1522 static vec
<vn_reference_op_s
>
1523 valueize_shared_reference_ops_from_ref (tree ref
, bool *valueized_anything
)
1527 shared_lookup_references
.truncate (0);
1528 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1529 shared_lookup_references
= valueize_refs_1 (shared_lookup_references
,
1530 valueized_anything
);
1531 return shared_lookup_references
;
1534 /* Create a vector of vn_reference_op_s structures from CALL, a
1535 call statement. The vector is shared among all callers of
1538 static vec
<vn_reference_op_s
>
1539 valueize_shared_reference_ops_from_call (gcall
*call
)
1543 shared_lookup_references
.truncate (0);
1544 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1545 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1546 return shared_lookup_references
;
1549 /* Lookup a SCCVN reference operation VR in the current hash table.
1550 Returns the resulting value number if it exists in the hash table,
1551 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1552 vn_reference_t stored in the hashtable if something is found. */
1555 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1557 vn_reference_s
**slot
;
1560 hash
= vr
->hashcode
;
1561 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1562 if (!slot
&& current_info
== optimistic_info
)
1563 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1567 *vnresult
= (vn_reference_t
)*slot
;
1568 return ((vn_reference_t
)*slot
)->result
;
1574 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1575 with the current VUSE and performs the expression lookup. */
1578 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
,
1579 unsigned int cnt
, void *vr_
)
1581 vn_reference_t vr
= (vn_reference_t
)vr_
;
1582 vn_reference_s
**slot
;
1585 /* This bounds the stmt walks we perform on reference lookups
1586 to O(1) instead of O(N) where N is the number of dominating
1588 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
1592 *last_vuse_ptr
= vuse
;
1594 /* Fixup vuse and hash. */
1596 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1597 vr
->vuse
= vuse_ssa_val (vuse
);
1599 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1601 hash
= vr
->hashcode
;
1602 slot
= current_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1603 if (!slot
&& current_info
== optimistic_info
)
1604 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
1611 /* Lookup an existing or insert a new vn_reference entry into the
1612 value table for the VUSE, SET, TYPE, OPERANDS reference which
1613 has the value VALUE which is either a constant or an SSA name. */
1615 static vn_reference_t
1616 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
1619 vec
<vn_reference_op_s
,
1624 vn_reference_t result
;
1627 vr1
.operands
= operands
;
1630 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1631 if (vn_reference_lookup_1 (&vr1
, &result
))
1633 if (TREE_CODE (value
) == SSA_NAME
)
1634 value_id
= VN_INFO (value
)->value_id
;
1636 value_id
= get_or_alloc_constant_value_id (value
);
1637 return vn_reference_insert_pieces (vuse
, set
, type
,
1638 operands
.copy (), value
, value_id
);
1641 static vn_nary_op_t
vn_nary_op_insert_stmt (gimple
*stmt
, tree result
);
1643 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1646 vn_lookup_simplify_result (code_helper rcode
, tree type
, tree
*ops
)
1648 if (!rcode
.is_tree_code ())
1650 vn_nary_op_t vnresult
= NULL
;
1651 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code
) rcode
),
1652 (tree_code
) rcode
, type
, ops
, &vnresult
);
1655 /* Return a value-number for RCODE OPS... either by looking up an existing
1656 value-number for the simplified result or by inserting the operation if
1660 vn_nary_build_or_lookup_1 (code_helper rcode
, tree type
, tree
*ops
,
1663 tree result
= NULL_TREE
;
1664 /* We will be creating a value number for
1666 So first simplify and lookup this expression to see if it
1667 is already available. */
1668 mprts_hook
= vn_lookup_simplify_result
;
1670 switch (TREE_CODE_LENGTH ((tree_code
) rcode
))
1673 res
= gimple_resimplify1 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1676 res
= gimple_resimplify2 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1679 res
= gimple_resimplify3 (NULL
, &rcode
, type
, ops
, vn_valueize
);
1683 gimple
*new_stmt
= NULL
;
1685 && gimple_simplified_result_is_gimple_val (rcode
, ops
))
1686 /* The expression is already available. */
1690 tree val
= vn_lookup_simplify_result (rcode
, type
, ops
);
1693 gimple_seq stmts
= NULL
;
1694 result
= maybe_push_res_to_seq (rcode
, type
, ops
, &stmts
);
1697 gcc_assert (gimple_seq_singleton_p (stmts
));
1698 new_stmt
= gimple_seq_first_stmt (stmts
);
1702 /* The expression is already available. */
1707 /* The expression is not yet available, value-number lhs to
1708 the new SSA_NAME we created. */
1709 /* Initialize value-number information properly. */
1710 VN_INFO_GET (result
)->valnum
= result
;
1711 VN_INFO (result
)->value_id
= get_next_value_id ();
1712 gimple_seq_add_stmt_without_update (&VN_INFO (result
)->expr
,
1714 VN_INFO (result
)->needs_insertion
= true;
1715 /* ??? PRE phi-translation inserts NARYs without corresponding
1716 SSA name result. Re-use those but set their result according
1717 to the stmt we just built. */
1718 vn_nary_op_t nary
= NULL
;
1719 vn_nary_op_lookup_stmt (new_stmt
, &nary
);
1722 gcc_assert (nary
->result
== NULL_TREE
);
1723 nary
->result
= gimple_assign_lhs (new_stmt
);
1725 /* As all "inserted" statements are singleton SCCs, insert
1726 to the valid table. This is strictly needed to
1727 avoid re-generating new value SSA_NAMEs for the same
1728 expression during SCC iteration over and over (the
1729 optimistic table gets cleared after each iteration).
1730 We do not need to insert into the optimistic table, as
1731 lookups there will fall back to the valid table. */
1732 else if (current_info
== optimistic_info
)
1734 current_info
= valid_info
;
1735 vn_nary_op_insert_stmt (new_stmt
, result
);
1736 current_info
= optimistic_info
;
1739 vn_nary_op_insert_stmt (new_stmt
, result
);
1740 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1742 fprintf (dump_file
, "Inserting name ");
1743 print_generic_expr (dump_file
, result
, 0);
1744 fprintf (dump_file
, " for expression ");
1745 print_gimple_expr (dump_file
, new_stmt
, 0, TDF_SLIM
);
1746 fprintf (dump_file
, "\n");
1752 /* Return a value-number for RCODE OPS... either by looking up an existing
1753 value-number for the simplified result or by inserting the operation. */
1756 vn_nary_build_or_lookup (code_helper rcode
, tree type
, tree
*ops
)
1758 return vn_nary_build_or_lookup_1 (rcode
, type
, ops
, true);
1761 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1762 its value if present. */
1765 vn_nary_simplify (vn_nary_op_t nary
)
1767 if (nary
->length
> 3)
1770 memcpy (ops
, nary
->op
, sizeof (tree
) * nary
->length
);
1771 return vn_nary_build_or_lookup_1 (nary
->opcode
, nary
->type
, ops
, false);
1775 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1776 from the statement defining VUSE and if not successful tries to
1777 translate *REFP and VR_ through an aggregate copy at the definition
1778 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1779 of *REF and *VR. If only disambiguation was performed then
1780 *DISAMBIGUATE_ONLY is set to true. */
1783 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1784 bool *disambiguate_only
)
1786 vn_reference_t vr
= (vn_reference_t
)vr_
;
1787 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1788 tree base
= ao_ref_base (ref
);
1789 HOST_WIDE_INT offset
, maxsize
;
1790 static vec
<vn_reference_op_s
> lhs_ops
;
1792 bool lhs_ref_ok
= false;
1794 /* If the reference is based on a parameter that was determined as
1795 pointing to readonly memory it doesn't change. */
1796 if (TREE_CODE (base
) == MEM_REF
1797 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
1798 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0))
1799 && bitmap_bit_p (const_parms
,
1800 SSA_NAME_VERSION (TREE_OPERAND (base
, 0))))
1802 *disambiguate_only
= true;
1806 /* First try to disambiguate after value-replacing in the definitions LHS. */
1807 if (is_gimple_assign (def_stmt
))
1809 tree lhs
= gimple_assign_lhs (def_stmt
);
1810 bool valueized_anything
= false;
1811 /* Avoid re-allocation overhead. */
1812 lhs_ops
.truncate (0);
1813 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1814 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
);
1815 if (valueized_anything
)
1817 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1818 get_alias_set (lhs
),
1819 TREE_TYPE (lhs
), lhs_ops
);
1821 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1823 *disambiguate_only
= true;
1829 ao_ref_init (&lhs_ref
, lhs
);
1833 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1834 && gimple_call_num_args (def_stmt
) <= 4)
1836 /* For builtin calls valueize its arguments and call the
1837 alias oracle again. Valueization may improve points-to
1838 info of pointers and constify size and position arguments.
1839 Originally this was motivated by PR61034 which has
1840 conditional calls to free falsely clobbering ref because
1841 of imprecise points-to info of the argument. */
1843 bool valueized_anything
= false;
1844 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1846 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1847 if (TREE_CODE (oldargs
[i
]) == SSA_NAME
1848 && VN_INFO (oldargs
[i
])->valnum
!= oldargs
[i
])
1850 gimple_call_set_arg (def_stmt
, i
, VN_INFO (oldargs
[i
])->valnum
);
1851 valueized_anything
= true;
1854 if (valueized_anything
)
1856 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1858 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1859 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1862 *disambiguate_only
= true;
1868 if (*disambiguate_only
)
1871 offset
= ref
->offset
;
1872 maxsize
= ref
->max_size
;
1874 /* If we cannot constrain the size of the reference we cannot
1875 test if anything kills it. */
1879 /* We can't deduce anything useful from clobbers. */
1880 if (gimple_clobber_p (def_stmt
))
1883 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1884 from that definition.
1886 if (is_gimple_reg_type (vr
->type
)
1887 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1888 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1889 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2))
1890 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1892 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1894 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1896 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
1898 size2
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2)) * 8;
1899 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1900 == tree_to_uhwi (gimple_call_arg (def_stmt
, 2))
1902 && operand_equal_p (base
, base2
, 0)
1903 && offset2
<= offset
1904 && offset2
+ size2
>= offset
+ maxsize
)
1906 tree val
= build_zero_cst (vr
->type
);
1907 return vn_reference_lookup_or_insert_for_pieces
1908 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1912 /* 2) Assignment from an empty CONSTRUCTOR. */
1913 else if (is_gimple_reg_type (vr
->type
)
1914 && gimple_assign_single_p (def_stmt
)
1915 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1916 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1919 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1921 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1922 &offset2
, &size2
, &maxsize2
, &reverse
);
1924 && operand_equal_p (base
, base2
, 0)
1925 && offset2
<= offset
1926 && offset2
+ size2
>= offset
+ maxsize
)
1928 tree val
= build_zero_cst (vr
->type
);
1929 return vn_reference_lookup_or_insert_for_pieces
1930 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
1934 /* 3) Assignment from a constant. We can use folds native encode/interpret
1935 routines to extract the assigned bits. */
1936 else if (ref
->size
== maxsize
1937 && is_gimple_reg_type (vr
->type
)
1938 && !contains_storage_order_barrier_p (vr
->operands
)
1939 && gimple_assign_single_p (def_stmt
)
1940 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1941 && maxsize
% BITS_PER_UNIT
== 0
1942 && offset
% BITS_PER_UNIT
== 0
1943 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
1944 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
1945 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
1948 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1950 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1951 &offset2
, &size2
, &maxsize2
, &reverse
);
1954 && maxsize2
== size2
1955 && size2
% BITS_PER_UNIT
== 0
1956 && offset2
% BITS_PER_UNIT
== 0
1957 && operand_equal_p (base
, base2
, 0)
1958 && offset2
<= offset
1959 && offset2
+ size2
>= offset
+ maxsize
)
1961 /* We support up to 512-bit values (for V8DFmode). */
1962 unsigned char buffer
[64];
1965 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1966 if (TREE_CODE (rhs
) == SSA_NAME
)
1967 rhs
= SSA_VAL (rhs
);
1968 len
= native_encode_expr (gimple_assign_rhs1 (def_stmt
),
1969 buffer
, sizeof (buffer
));
1972 tree type
= vr
->type
;
1973 /* Make sure to interpret in a type that has a range
1974 covering the whole access size. */
1975 if (INTEGRAL_TYPE_P (vr
->type
)
1976 && ref
->size
!= TYPE_PRECISION (vr
->type
))
1977 type
= build_nonstandard_integer_type (ref
->size
,
1978 TYPE_UNSIGNED (type
));
1979 tree val
= native_interpret_expr (type
,
1981 + ((offset
- offset2
)
1983 ref
->size
/ BITS_PER_UNIT
);
/* If we chop off bits because the type's precision doesn't
1985 match the memory access size this is ok when optimizing
1986 reads but not when called from the DSE code during
1989 && type
!= vr
->type
)
1991 if (! int_fits_type_p (val
, vr
->type
))
1994 val
= fold_convert (vr
->type
, val
);
1998 return vn_reference_lookup_or_insert_for_pieces
1999 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2004 /* 4) Assignment from an SSA name which definition we may be able
2005 to access pieces from. */
2006 else if (ref
->size
== maxsize
2007 && is_gimple_reg_type (vr
->type
)
2008 && !contains_storage_order_barrier_p (vr
->operands
)
2009 && gimple_assign_single_p (def_stmt
)
2010 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
2013 HOST_WIDE_INT offset2
, size2
, maxsize2
;
2015 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
2016 &offset2
, &size2
, &maxsize2
,
2020 && maxsize2
== size2
2021 && operand_equal_p (base
, base2
, 0)
2022 && offset2
<= offset
2023 && offset2
+ size2
>= offset
+ maxsize
2024 /* ??? We can't handle bitfield precision extracts without
2025 either using an alternate type for the BIT_FIELD_REF and
2026 then doing a conversion or possibly adjusting the offset
2027 according to endianness. */
2028 && (! INTEGRAL_TYPE_P (vr
->type
)
2029 || ref
->size
== TYPE_PRECISION (vr
->type
))
2030 && ref
->size
% BITS_PER_UNIT
== 0)
2032 code_helper rcode
= BIT_FIELD_REF
;
2034 ops
[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt
));
2035 ops
[1] = bitsize_int (ref
->size
);
2036 ops
[2] = bitsize_int (offset
- offset2
);
2037 tree val
= vn_nary_build_or_lookup (rcode
, vr
->type
, ops
);
2040 vn_reference_t res
= vn_reference_lookup_or_insert_for_pieces
2041 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2047 /* 5) For aggregate copies translate the reference through them if
2048 the copy kills ref. */
2049 else if (vn_walk_kind
== VN_WALKREWRITE
2050 && gimple_assign_single_p (def_stmt
)
2051 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
2052 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
2053 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
2056 HOST_WIDE_INT maxsize2
;
2058 auto_vec
<vn_reference_op_s
> rhs
;
2059 vn_reference_op_t vro
;
2065 /* See if the assignment kills REF. */
2066 base2
= ao_ref_base (&lhs_ref
);
2067 maxsize2
= lhs_ref
.max_size
;
2070 && (TREE_CODE (base
) != MEM_REF
2071 || TREE_CODE (base2
) != MEM_REF
2072 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
2073 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
2074 TREE_OPERAND (base2
, 1))))
2075 || !stmt_kills_ref_p (def_stmt
, ref
))
2078 /* Find the common base of ref and the lhs. lhs_ops already
2079 contains valueized operands for the lhs. */
2080 i
= vr
->operands
.length () - 1;
2081 j
= lhs_ops
.length () - 1;
2082 while (j
>= 0 && i
>= 0
2083 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
2089 /* ??? The innermost op should always be a MEM_REF and we already
2090 checked that the assignment to the lhs kills vr. Thus for
2091 aggregate copies using char[] types the vn_reference_op_eq
2092 may fail when comparing types for compatibility. But we really
2093 don't care here - further lookups with the rewritten operands
2094 will simply fail if we messed up types too badly. */
2095 HOST_WIDE_INT extra_off
= 0;
2096 if (j
== 0 && i
>= 0
2097 && lhs_ops
[0].opcode
== MEM_REF
2098 && lhs_ops
[0].off
!= -1)
2100 if (lhs_ops
[0].off
== vr
->operands
[i
].off
)
2102 else if (vr
->operands
[i
].opcode
== MEM_REF
2103 && vr
->operands
[i
].off
!= -1)
2105 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
2110 /* i now points to the first additional op.
2111 ??? LHS may not be completely contained in VR, one or more
2112 VIEW_CONVERT_EXPRs could be in its way. We could at least
2113 try handling outermost VIEW_CONVERT_EXPRs. */
2117 /* Punt if the additional ops contain a storage order barrier. */
2118 for (k
= i
; k
>= 0; k
--)
2120 vro
= &vr
->operands
[k
];
2121 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
2125 /* Now re-write REF to be based on the rhs of the assignment. */
2126 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
2128 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2131 if (rhs
.length () < 2
2132 || rhs
[0].opcode
!= MEM_REF
2133 || rhs
[0].off
== -1)
2135 rhs
[0].off
+= extra_off
;
2136 rhs
[0].op0
= int_const_binop (PLUS_EXPR
, rhs
[0].op0
,
2137 build_int_cst (TREE_TYPE (rhs
[0].op0
),
2141 /* We need to pre-pend vr->operands[0..i] to rhs. */
2142 vec
<vn_reference_op_s
> old
= vr
->operands
;
2143 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
2144 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
2146 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
2147 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
2148 vr
->operands
[i
+ 1 + j
] = *vro
;
2149 vr
->operands
= valueize_refs (vr
->operands
);
2150 if (old
== shared_lookup_references
)
2151 shared_lookup_references
= vr
->operands
;
2152 vr
->hashcode
= vn_reference_compute_hash (vr
);
2154 /* Try folding the new reference to a constant. */
2155 tree val
= fully_constant_vn_reference_p (vr
);
2157 return vn_reference_lookup_or_insert_for_pieces
2158 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2160 /* Adjust *ref from the new operands. */
2161 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2163 /* This can happen with bitfields. */
2164 if (ref
->size
!= r
.size
)
2168 /* Do not update last seen VUSE after translating. */
2169 last_vuse_ptr
= NULL
;
2171 /* Keep looking for the adjusted *REF / VR pair. */
2175 /* 6) For memcpy copies translate the reference through them if
2176 the copy kills ref. */
2177 else if (vn_walk_kind
== VN_WALKREWRITE
2178 && is_gimple_reg_type (vr
->type
)
2179 /* ??? Handle BCOPY as well. */
2180 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
2181 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
2182 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
))
2183 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2184 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
2185 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
2186 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
2187 && tree_fits_uhwi_p (gimple_call_arg (def_stmt
, 2)))
2191 HOST_WIDE_INT rhs_offset
, copy_size
, lhs_offset
;
2192 vn_reference_op_s op
;
2195 /* Only handle non-variable, addressable refs. */
2196 if (ref
->size
!= maxsize
2197 || offset
% BITS_PER_UNIT
!= 0
2198 || ref
->size
% BITS_PER_UNIT
!= 0)
2201 /* Extract a pointer base and an offset for the destination. */
2202 lhs
= gimple_call_arg (def_stmt
, 0);
2204 if (TREE_CODE (lhs
) == SSA_NAME
)
2206 lhs
= SSA_VAL (lhs
);
2207 if (TREE_CODE (lhs
) == SSA_NAME
)
2209 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
2210 if (gimple_assign_single_p (def_stmt
)
2211 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2212 lhs
= gimple_assign_rhs1 (def_stmt
);
2215 if (TREE_CODE (lhs
) == ADDR_EXPR
)
2217 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
2221 if (TREE_CODE (tem
) == MEM_REF
2222 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2224 lhs
= TREE_OPERAND (tem
, 0);
2225 if (TREE_CODE (lhs
) == SSA_NAME
)
2226 lhs
= SSA_VAL (lhs
);
2227 lhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2229 else if (DECL_P (tem
))
2230 lhs
= build_fold_addr_expr (tem
);
2234 if (TREE_CODE (lhs
) != SSA_NAME
2235 && TREE_CODE (lhs
) != ADDR_EXPR
)
2238 /* Extract a pointer base and an offset for the source. */
2239 rhs
= gimple_call_arg (def_stmt
, 1);
2241 if (TREE_CODE (rhs
) == SSA_NAME
)
2242 rhs
= SSA_VAL (rhs
);
2243 if (TREE_CODE (rhs
) == ADDR_EXPR
)
2245 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
2249 if (TREE_CODE (tem
) == MEM_REF
2250 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1)))
2252 rhs
= TREE_OPERAND (tem
, 0);
2253 rhs_offset
+= tree_to_uhwi (TREE_OPERAND (tem
, 1));
2255 else if (DECL_P (tem
))
2256 rhs
= build_fold_addr_expr (tem
);
2260 if (TREE_CODE (rhs
) != SSA_NAME
2261 && TREE_CODE (rhs
) != ADDR_EXPR
)
2264 copy_size
= tree_to_uhwi (gimple_call_arg (def_stmt
, 2));
2266 /* The bases of the destination and the references have to agree. */
2267 if ((TREE_CODE (base
) != MEM_REF
2269 || (TREE_CODE (base
) == MEM_REF
2270 && (TREE_OPERAND (base
, 0) != lhs
2271 || !tree_fits_uhwi_p (TREE_OPERAND (base
, 1))))
2273 && (TREE_CODE (lhs
) != ADDR_EXPR
2274 || TREE_OPERAND (lhs
, 0) != base
)))
2277 at
= offset
/ BITS_PER_UNIT
;
2278 if (TREE_CODE (base
) == MEM_REF
)
2279 at
+= tree_to_uhwi (TREE_OPERAND (base
, 1));
2280 /* If the access is completely outside of the memcpy destination
2281 area there is no aliasing. */
2282 if (lhs_offset
>= at
+ maxsize
/ BITS_PER_UNIT
2283 || lhs_offset
+ copy_size
<= at
)
2285 /* And the access has to be contained within the memcpy destination. */
2287 || lhs_offset
+ copy_size
< at
+ maxsize
/ BITS_PER_UNIT
)
2290 /* Make room for 2 operands in the new reference. */
2291 if (vr
->operands
.length () < 2)
2293 vec
<vn_reference_op_s
> old
= vr
->operands
;
2294 vr
->operands
.safe_grow_cleared (2);
2295 if (old
== shared_lookup_references
)
2296 shared_lookup_references
= vr
->operands
;
2299 vr
->operands
.truncate (2);
2301 /* The looked-through reference is a simple MEM_REF. */
2302 memset (&op
, 0, sizeof (op
));
2304 op
.opcode
= MEM_REF
;
2305 op
.op0
= build_int_cst (ptr_type_node
, at
- rhs_offset
);
2306 op
.off
= at
- lhs_offset
+ rhs_offset
;
2307 vr
->operands
[0] = op
;
2308 op
.type
= TREE_TYPE (rhs
);
2309 op
.opcode
= TREE_CODE (rhs
);
2312 vr
->operands
[1] = op
;
2313 vr
->hashcode
= vn_reference_compute_hash (vr
);
2315 /* Try folding the new reference to a constant. */
2316 tree val
= fully_constant_vn_reference_p (vr
);
2318 return vn_reference_lookup_or_insert_for_pieces
2319 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2321 /* Adjust *ref from the new operands. */
2322 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
2324 /* This can happen with bitfields. */
2325 if (ref
->size
!= r
.size
)
2329 /* Do not update last seen VUSE after translating. */
2330 last_vuse_ptr
= NULL
;
2332 /* Keep looking for the adjusted *REF / VR pair. */
2336 /* Bail out and stop walking. */
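/* Illustrative sketch of the lookthrough cases above (schematic source,
   not taken from the surrounding code): given

     struct S { int f; int g; } a, b;
     ...
     memcpy (&a, &b, sizeof (a));
     x_1 = a.f;

   the memcpy case rewrites the looked-up reference a.f into a MEM_REF
   based on the copy source at the corresponding offset, so the walk can
   continue in the memory state the memcpy consumed and possibly reuse an
   earlier value stored to b.f.  */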
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */
2357 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
2358 vec
<vn_reference_op_s
> operands
,
2359 vn_reference_t
*vnresult
, vn_lookup_kind kind
)
2361 struct vn_reference_s vr1
;
2369 vr1
.vuse
= vuse_ssa_val (vuse
);
2370 shared_lookup_references
.truncate (0);
2371 shared_lookup_references
.safe_grow (operands
.length ());
2372 memcpy (shared_lookup_references
.address (),
2373 operands
.address (),
2374 sizeof (vn_reference_op_s
)
2375 * operands
.length ());
2376 vr1
.operands
= operands
= shared_lookup_references
2377 = valueize_refs (shared_lookup_references
);
2380 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2381 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2384 vn_reference_lookup_1 (&vr1
, vnresult
);
2386 && kind
!= VN_NOWALK
2390 vn_walk_kind
= kind
;
2391 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
2393 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2394 vn_reference_lookup_2
,
2395 vn_reference_lookup_3
,
2396 vuse_ssa_val
, &vr1
);
2397 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2401 return (*vnresult
)->result
;
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */
2414 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
2415 vn_reference_t
*vnresult
, bool tbaa_p
)
2417 vec
<vn_reference_op_s
> operands
;
2418 struct vn_reference_s vr1
;
2420 bool valuezied_anything
;
2425 vr1
.vuse
= vuse_ssa_val (vuse
);
2426 vr1
.operands
= operands
2427 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
2428 vr1
.type
= TREE_TYPE (op
);
2429 vr1
.set
= tbaa_p
? get_alias_set (op
) : 0;
2430 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2431 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
2434 if (kind
!= VN_NOWALK
2437 vn_reference_t wvnresult
;
2439 /* Make sure to use a valueized reference if we valueized anything.
2440 Otherwise preserve the full reference for advanced TBAA. */
2441 if (!valuezied_anything
2442 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.type
,
2444 ao_ref_init (&r
, op
);
2446 r
.ref_alias_set
= r
.base_alias_set
= 0;
2447 vn_walk_kind
= kind
;
2449 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
2450 vn_reference_lookup_2
,
2451 vn_reference_lookup_3
,
2452 vuse_ssa_val
, &vr1
);
2453 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
2457 *vnresult
= wvnresult
;
2458 return wvnresult
->result
;
2464 return vn_reference_lookup_1 (&vr1
, vnresult
);
2467 /* Lookup CALL in the current hash table and return the entry in
2468 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2471 vn_reference_lookup_call (gcall
*call
, vn_reference_t
*vnresult
,
2477 tree vuse
= gimple_vuse (call
);
2479 vr
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2480 vr
->operands
= valueize_shared_reference_ops_from_call (call
);
2481 vr
->type
= gimple_expr_type (call
);
2483 vr
->hashcode
= vn_reference_compute_hash (vr
);
2484 vn_reference_lookup_1 (vr
, vnresult
);
2487 /* Insert OP into the current hash table with a value number of
2488 RESULT, and return the resulting reference structure we created. */
2490 static vn_reference_t
2491 vn_reference_insert (tree op
, tree result
, tree vuse
, tree vdef
)
2493 vn_reference_s
**slot
;
2497 vr1
= current_info
->references_pool
->allocate ();
2498 if (TREE_CODE (result
) == SSA_NAME
)
2499 vr1
->value_id
= VN_INFO (result
)->value_id
;
2501 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
2502 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2503 vr1
->operands
= valueize_shared_reference_ops_from_ref (op
, &tem
).copy ();
2504 vr1
->type
= TREE_TYPE (op
);
2505 vr1
->set
= get_alias_set (op
);
2506 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2507 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
2508 vr1
->result_vdef
= vdef
;
2510 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
2513 /* Because we lookup stores using vuses, and value number failures
2514 using the vdefs (see visit_reference_op_store for how and why),
2515 it's possible that on failure we may try to insert an already
2516 inserted store. This is not wrong, there is no ssa name for a
2517 store that we could use as a differentiator anyway. Thus, unlike
2518 the other lookup functions, you cannot gcc_assert (!*slot)
2521 /* But free the old slot in case of a collision. */
2523 free_reference (*slot
);
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */
2534 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
2535 vec
<vn_reference_op_s
> operands
,
2536 tree result
, unsigned int value_id
)
2539 vn_reference_s
**slot
;
2542 vr1
= current_info
->references_pool
->allocate ();
2543 vr1
->value_id
= value_id
;
2544 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2545 vr1
->operands
= valueize_refs (operands
);
2548 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
2549 if (result
&& TREE_CODE (result
) == SSA_NAME
)
2550 result
= SSA_VAL (result
);
2551 vr1
->result
= result
;
2553 slot
= current_info
->references
->find_slot_with_hash (vr1
, vr1
->hashcode
,
2556 /* At this point we should have all the things inserted that we have
2557 seen before, and we should never try inserting something that
2559 gcc_assert (!*slot
);
2561 free_reference (*slot
);
2567 /* Compute and return the hash value for nary operation VBO1. */
2570 vn_nary_op_compute_hash (const vn_nary_op_t vno1
)
2572 inchash::hash hstate
;
2575 for (i
= 0; i
< vno1
->length
; ++i
)
2576 if (TREE_CODE (vno1
->op
[i
]) == SSA_NAME
)
2577 vno1
->op
[i
] = SSA_VAL (vno1
->op
[i
]);
2579 if (((vno1
->length
== 2
2580 && commutative_tree_code (vno1
->opcode
))
2581 || (vno1
->length
== 3
2582 && commutative_ternary_tree_code (vno1
->opcode
)))
2583 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1]))
2584 std::swap (vno1
->op
[0], vno1
->op
[1]);
2585 else if (TREE_CODE_CLASS (vno1
->opcode
) == tcc_comparison
2586 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1]))
2588 std::swap (vno1
->op
[0], vno1
->op
[1]);
2589 vno1
->opcode
= swap_tree_comparison (vno1
->opcode
);
2592 hstate
.add_int (vno1
->opcode
);
2593 for (i
= 0; i
< vno1
->length
; ++i
)
2594 inchash::add_expr (vno1
->op
[i
], hstate
);
2596 return hstate
.end ();
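/* A small sketch of the canonicalization performed above (made-up SSA
   names): because commutative operands are swapped into a canonical
   order before hashing, the right-hand sides of

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   hash and compare equal, and a comparison like a_2 < b_3 can likewise
   match b_3 > a_2 after swapping the operands and the comparison code.  */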
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
2650 /* Return the number of operands for a vn_nary ops structure from STMT. */
2653 vn_nary_length_from_stmt (gimple
*stmt
)
2655 switch (gimple_assign_rhs_code (stmt
))
2659 case VIEW_CONVERT_EXPR
:
2666 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2669 return gimple_num_ops (stmt
) - 1;
2673 /* Initialize VNO from STMT. */
2676 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple
*stmt
)
2680 vno
->opcode
= gimple_assign_rhs_code (stmt
);
2681 vno
->type
= gimple_expr_type (stmt
);
2682 switch (vno
->opcode
)
2686 case VIEW_CONVERT_EXPR
:
2688 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2693 vno
->op
[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
2694 vno
->op
[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 1);
2695 vno
->op
[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt
), 2);
2699 vno
->length
= CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
));
2700 for (i
= 0; i
< vno
->length
; ++i
)
2701 vno
->op
[i
] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt
), i
)->value
;
2705 gcc_checking_assert (!gimple_assign_single_p (stmt
));
2706 vno
->length
= gimple_num_ops (stmt
) - 1;
2707 for (i
= 0; i
< vno
->length
; ++i
)
2708 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
                                                  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
                                                  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
                                  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
                        bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  /* While we do not want to insert things twice it's awkward to
     avoid it in the case where visit_nary_op pattern-matches stuff
     and ends up simplifying the replacement to itself.  We then
     get two inserts, one from visit_nary_op and one from
     vn_nary_build_or_lookup.
     So allow inserts with the same value number.  */
  if (*slot && (*slot)->result == vno->result)
    return *slot;

  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
2880 /* Compute a hashcode for PHI operation VP1 and return it. */
2882 static inline hashval_t
2883 vn_phi_compute_hash (vn_phi_t vp1
)
2885 inchash::hash
hstate (vp1
->phiargs
.length () > 2
2886 ? vp1
->block
->index
: vp1
->phiargs
.length ());
2892 /* If all PHI arguments are constants we need to distinguish
2893 the PHI node via its type. */
2895 hstate
.merge_hash (vn_hash_type (type
));
2897 FOR_EACH_EDGE (e
, ei
, vp1
->block
->preds
)
/* Don't hash backedge values; they need to be handled as VN_TOP
   for optimistic value-numbering.  */
2901 if (e
->flags
& EDGE_DFS_BACK
)
2904 phi1op
= vp1
->phiargs
[e
->dest_idx
];
2905 if (phi1op
== VN_TOP
)
2907 inchash::add_expr (phi1op
, hstate
);
2910 return hstate
.end ();
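/* Sketch of why backedge arguments are skipped above (schematic): for a
   loop header PHI

     i_1 = PHI <0(entry), i_5(latch)>

   the latch argument is treated as VN_TOP during optimistic iteration,
   so hashing the current value of i_5 would make otherwise equal PHIs
   hash differently between iterations of the SCC walk.  */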
2914 /* Return true if COND1 and COND2 represent the same condition, set
2915 *INVERTED_P if one needs to be inverted to make it the same as
2919 cond_stmts_equal_p (gcond
*cond1
, tree lhs1
, tree rhs1
,
2920 gcond
*cond2
, tree lhs2
, tree rhs2
, bool *inverted_p
)
2922 enum tree_code code1
= gimple_cond_code (cond1
);
2923 enum tree_code code2
= gimple_cond_code (cond2
);
2925 *inverted_p
= false;
2928 else if (code1
== swap_tree_comparison (code2
))
2929 std::swap (lhs2
, rhs2
);
2930 else if (code1
== invert_tree_comparison (code2
, HONOR_NANS (lhs2
)))
2932 else if (code1
== invert_tree_comparison
2933 (swap_tree_comparison (code2
), HONOR_NANS (lhs2
)))
2935 std::swap (lhs2
, rhs2
);
2941 return ((expressions_equal_p (lhs1
, lhs2
)
2942 && expressions_equal_p (rhs1
, rhs2
))
2943 || (commutative_tree_code (code1
)
2944 && expressions_equal_p (lhs1
, rhs2
)
2945 && expressions_equal_p (rhs1
, lhs2
)));
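/* Illustrative example for cond_stmts_equal_p (made-up operands): with
   lhs1/rhs1 being a/b and lhs2/rhs2 being b/a, the conditions

     if (a < b)   and   if (b > a)

   compare equal after swapping the second pair, while  if (a >= b)
   compares equal with *INVERTED_P set.  */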
2948 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2951 vn_phi_eq (const_vn_phi_t
const vp1
, const_vn_phi_t
const vp2
)
2953 if (vp1
->hashcode
!= vp2
->hashcode
)
2956 if (vp1
->block
!= vp2
->block
)
2958 if (vp1
->phiargs
.length () != vp2
->phiargs
.length ())
2961 switch (vp1
->phiargs
.length ())
2964 /* Single-arg PHIs are just copies. */
2969 /* Rule out backedges into the PHI. */
2970 if (vp1
->block
->loop_father
->header
== vp1
->block
2971 || vp2
->block
->loop_father
->header
== vp2
->block
)
2974 /* If the PHI nodes do not have compatible types
2975 they are not the same. */
2976 if (!types_compatible_p (vp1
->type
, vp2
->type
))
2980 = get_immediate_dominator (CDI_DOMINATORS
, vp1
->block
);
2982 = get_immediate_dominator (CDI_DOMINATORS
, vp2
->block
);
/* If the immediate dominators end in switch stmts, multiple
   values may end up in the same PHI arg via intermediate
   CFG merges.  */
2986 if (EDGE_COUNT (idom1
->succs
) != 2
2987 || EDGE_COUNT (idom2
->succs
) != 2)
2990 /* Verify the controlling stmt is the same. */
2991 gimple
*last1
= last_stmt (idom1
);
2992 gimple
*last2
= last_stmt (idom2
);
2993 if (gimple_code (last1
) != GIMPLE_COND
2994 || gimple_code (last2
) != GIMPLE_COND
)
2997 if (! cond_stmts_equal_p (as_a
<gcond
*> (last1
),
2998 vp1
->cclhs
, vp1
->ccrhs
,
2999 as_a
<gcond
*> (last2
),
3000 vp2
->cclhs
, vp2
->ccrhs
,
3004 /* Get at true/false controlled edges into the PHI. */
3005 edge te1
, te2
, fe1
, fe2
;
3006 if (! extract_true_false_controlled_edges (idom1
, vp1
->block
,
3008 || ! extract_true_false_controlled_edges (idom2
, vp2
->block
,
3012 /* Swap edges if the second condition is the inverted of the
3015 std::swap (te2
, fe2
);
3017 /* ??? Handle VN_TOP specially. */
3018 if (! expressions_equal_p (vp1
->phiargs
[te1
->dest_idx
],
3019 vp2
->phiargs
[te2
->dest_idx
])
3020 || ! expressions_equal_p (vp1
->phiargs
[fe1
->dest_idx
],
3021 vp2
->phiargs
[fe2
->dest_idx
]))
3032 /* If the PHI nodes do not have compatible types
3033 they are not the same. */
3034 if (!types_compatible_p (vp1
->type
, vp2
->type
))
/* Any phi in the same block will have its arguments in the
   same edge order, because of how we store phi nodes.  */
3041 FOR_EACH_VEC_ELT (vp1
->phiargs
, i
, phi1op
)
3043 tree phi2op
= vp2
->phiargs
[i
];
3044 if (phi1op
== VN_TOP
|| phi2op
== VN_TOP
)
3046 if (!expressions_equal_p (phi1op
, phi2op
))
3053 static vec
<tree
> shared_lookup_phiargs
;
3055 /* Lookup PHI in the current hash table, and return the resulting
3056 value number if it exists in the hash table. Return NULL_TREE if
3057 it does not exist in the hash table. */
3060 vn_phi_lookup (gimple
*phi
)
3063 struct vn_phi_s vp1
;
3067 shared_lookup_phiargs
.truncate (0);
3068 shared_lookup_phiargs
.safe_grow (gimple_phi_num_args (phi
));
3070 /* Canonicalize the SSA_NAME's to their value number. */
3071 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3073 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3074 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
3075 shared_lookup_phiargs
[e
->dest_idx
] = def
;
3077 vp1
.type
= TREE_TYPE (gimple_phi_result (phi
));
3078 vp1
.phiargs
= shared_lookup_phiargs
;
3079 vp1
.block
= gimple_bb (phi
);
3080 /* Extract values of the controlling condition. */
3081 vp1
.cclhs
= NULL_TREE
;
3082 vp1
.ccrhs
= NULL_TREE
;
3083 basic_block idom1
= get_immediate_dominator (CDI_DOMINATORS
, vp1
.block
);
3084 if (EDGE_COUNT (idom1
->succs
) == 2)
3085 if (gcond
*last1
= dyn_cast
<gcond
*> (last_stmt (idom1
)))
3087 vp1
.cclhs
= vn_valueize (gimple_cond_lhs (last1
));
3088 vp1
.ccrhs
= vn_valueize (gimple_cond_rhs (last1
));
3090 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
3091 slot
= current_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
3093 if (!slot
&& current_info
== optimistic_info
)
3094 slot
= valid_info
->phis
->find_slot_with_hash (&vp1
, vp1
.hashcode
,
3098 return (*slot
)->result
;
3101 /* Insert PHI into the current hash table with a value number of
3105 vn_phi_insert (gimple
*phi
, tree result
)
3108 vn_phi_t vp1
= current_info
->phis_pool
->allocate ();
3109 vec
<tree
> args
= vNULL
;
3113 args
.safe_grow (gimple_phi_num_args (phi
));
3115 /* Canonicalize the SSA_NAME's to their value number. */
3116 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3118 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3119 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
3120 args
[e
->dest_idx
] = def
;
3122 vp1
->value_id
= VN_INFO (result
)->value_id
;
3123 vp1
->type
= TREE_TYPE (gimple_phi_result (phi
));
3124 vp1
->phiargs
= args
;
3125 vp1
->block
= gimple_bb (phi
);
3126 /* Extract values of the controlling condition. */
3127 vp1
->cclhs
= NULL_TREE
;
3128 vp1
->ccrhs
= NULL_TREE
;
3129 basic_block idom1
= get_immediate_dominator (CDI_DOMINATORS
, vp1
->block
);
3130 if (EDGE_COUNT (idom1
->succs
) == 2)
3131 if (gcond
*last1
= dyn_cast
<gcond
*> (last_stmt (idom1
)))
3133 vp1
->cclhs
= vn_valueize (gimple_cond_lhs (last1
));
3134 vp1
->ccrhs
= vn_valueize (gimple_cond_rhs (last1
));
3136 vp1
->result
= result
;
3137 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
3139 slot
= current_info
->phis
->find_slot_with_hash (vp1
, vp1
->hashcode
, INSERT
);
3141 /* Because we iterate over phi operations more than once, it's
3142 possible the slot might already exist here, hence no assert.*/
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  unsigned int i;
  tree var;

  fprintf (out, "SCC consists of %u:", scc.length ());
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}
3165 /* Return true if BB1 is dominated by BB2 taking into account edges
3166 that are not executable. */
3169 dominated_by_p_w_unex (basic_block bb1
, basic_block bb2
)
3174 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
3177 /* Before iterating we'd like to know if there exists a
3178 (executable) path from bb2 to bb1 at all, if not we can
3179 directly return false. For now simply iterate once. */
3181 /* Iterate to the single executable bb1 predecessor. */
3182 if (EDGE_COUNT (bb1
->preds
) > 1)
3185 FOR_EACH_EDGE (e
, ei
, bb1
->preds
)
3186 if (e
->flags
& EDGE_EXECUTABLE
)
3199 /* Re-do the dominance check with changed bb1. */
3200 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
3205 /* Iterate to the single executable bb2 successor. */
3207 FOR_EACH_EDGE (e
, ei
, bb2
->succs
)
3208 if (e
->flags
& EDGE_EXECUTABLE
)
3219 /* Verify the reached block is only reached through succe.
3220 If there is only one edge we can spare us the dominator
3221 check and iterate directly. */
3222 if (EDGE_COUNT (succe
->dest
->preds
) > 1)
3224 FOR_EACH_EDGE (e
, ei
, succe
->dest
->preds
)
3226 && (e
->flags
& EDGE_EXECUTABLE
))
3236 /* Re-do the dominance check with changed bb2. */
3237 if (dominated_by_p (CDI_DOMINATORS
, bb1
, bb2
))
3242 /* We could now iterate updating bb1 / bb2. */
3246 /* Set the value number of FROM to TO, return true if it has changed
3250 set_ssa_val_to (tree from
, tree to
)
3252 tree currval
= SSA_VAL (from
);
3253 HOST_WIDE_INT toff
, coff
;
3255 /* The only thing we allow as value numbers are ssa_names
3256 and invariants. So assert that here. We don't allow VN_TOP
3257 as visiting a stmt should produce a value-number other than
3259 ??? Still VN_TOP can happen for unreachable code, so force
3260 it to varying in that case. Not all code is prepared to
3261 get VN_TOP on valueization. */
3264 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3265 fprintf (dump_file
, "Forcing value number to varying on "
3266 "receiving VN_TOP\n");
3270 gcc_assert (to
!= NULL_TREE
3271 && ((TREE_CODE (to
) == SSA_NAME
3272 && (to
== from
|| SSA_VAL (to
) == to
))
3273 || is_gimple_min_invariant (to
)));
3277 if (currval
== from
)
3279 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3281 fprintf (dump_file
, "Not changing value number of ");
3282 print_generic_expr (dump_file
, from
, 0);
3283 fprintf (dump_file
, " from VARYING to ");
3284 print_generic_expr (dump_file
, to
, 0);
3285 fprintf (dump_file
, "\n");
3289 else if (currval
!= VN_TOP
3290 && ! is_gimple_min_invariant (currval
)
3291 && is_gimple_min_invariant (to
))
3293 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3295 fprintf (dump_file
, "Forcing VARYING instead of changing "
3296 "value number of ");
3297 print_generic_expr (dump_file
, from
, 0);
3298 fprintf (dump_file
, " from ");
3299 print_generic_expr (dump_file
, currval
, 0);
3300 fprintf (dump_file
, " (non-constant) to ");
3301 print_generic_expr (dump_file
, to
, 0);
3302 fprintf (dump_file
, " (constant)\n");
3306 else if (TREE_CODE (to
) == SSA_NAME
3307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
3311 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3313 fprintf (dump_file
, "Setting value number of ");
3314 print_generic_expr (dump_file
, from
, 0);
3315 fprintf (dump_file
, " to ");
3316 print_generic_expr (dump_file
, to
, 0);
3320 && !operand_equal_p (currval
, to
, 0)
3321 /* ??? For addresses involving volatile objects or types operand_equal_p
3322 does not reliably detect ADDR_EXPRs as equal. We know we are only
3323 getting invariant gimple addresses here, so can use
3324 get_addr_base_and_unit_offset to do this comparison. */
3325 && !(TREE_CODE (currval
) == ADDR_EXPR
3326 && TREE_CODE (to
) == ADDR_EXPR
3327 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval
, 0), &coff
)
3328 == get_addr_base_and_unit_offset (TREE_OPERAND (to
, 0), &toff
))
3331 /* If we equate two SSA names we have to make the side-band info
3332 of the leader conservative (and remember whatever original value
3334 if (TREE_CODE (to
) == SSA_NAME
)
3336 if (INTEGRAL_TYPE_P (TREE_TYPE (to
))
3337 && SSA_NAME_RANGE_INFO (to
))
3339 if (SSA_NAME_IS_DEFAULT_DEF (to
)
3340 || dominated_by_p_w_unex
3341 (gimple_bb (SSA_NAME_DEF_STMT (from
)),
3342 gimple_bb (SSA_NAME_DEF_STMT (to
))))
3343 /* Keep the info from the dominator. */
3345 else if (SSA_NAME_IS_DEFAULT_DEF (from
)
3346 || dominated_by_p_w_unex
3347 (gimple_bb (SSA_NAME_DEF_STMT (to
)),
3348 gimple_bb (SSA_NAME_DEF_STMT (from
))))
3350 /* Save old info. */
3351 if (! VN_INFO (to
)->info
.range_info
)
3353 VN_INFO (to
)->info
.range_info
= SSA_NAME_RANGE_INFO (to
);
3354 VN_INFO (to
)->range_info_anti_range_p
3355 = SSA_NAME_ANTI_RANGE_P (to
);
3357 /* Use that from the dominator. */
3358 SSA_NAME_RANGE_INFO (to
) = SSA_NAME_RANGE_INFO (from
);
3359 SSA_NAME_ANTI_RANGE_P (to
) = SSA_NAME_ANTI_RANGE_P (from
);
3363 /* Save old info. */
3364 if (! VN_INFO (to
)->info
.range_info
)
3366 VN_INFO (to
)->info
.range_info
= SSA_NAME_RANGE_INFO (to
);
3367 VN_INFO (to
)->range_info_anti_range_p
3368 = SSA_NAME_ANTI_RANGE_P (to
);
3370 /* Rather than allocating memory and unioning the info
3372 SSA_NAME_RANGE_INFO (to
) = NULL
;
3375 else if (POINTER_TYPE_P (TREE_TYPE (to
))
3376 && SSA_NAME_PTR_INFO (to
))
3378 if (SSA_NAME_IS_DEFAULT_DEF (to
)
3379 || dominated_by_p_w_unex
3380 (gimple_bb (SSA_NAME_DEF_STMT (from
)),
3381 gimple_bb (SSA_NAME_DEF_STMT (to
))))
3382 /* Keep the info from the dominator. */
3384 else if (SSA_NAME_IS_DEFAULT_DEF (from
)
3385 || dominated_by_p_w_unex
3386 (gimple_bb (SSA_NAME_DEF_STMT (to
)),
3387 gimple_bb (SSA_NAME_DEF_STMT (from
))))
3389 /* Save old info. */
3390 if (! VN_INFO (to
)->info
.ptr_info
)
3391 VN_INFO (to
)->info
.ptr_info
= SSA_NAME_PTR_INFO (to
);
3392 /* Use that from the dominator. */
3393 SSA_NAME_PTR_INFO (to
) = SSA_NAME_PTR_INFO (from
);
3395 else if (! SSA_NAME_PTR_INFO (from
)
3396 /* Handle the case of trivially equivalent info. */
3397 || memcmp (SSA_NAME_PTR_INFO (to
),
3398 SSA_NAME_PTR_INFO (from
),
3399 sizeof (ptr_info_def
)) != 0)
3401 /* Save old info. */
3402 if (! VN_INFO (to
)->info
.ptr_info
)
3403 VN_INFO (to
)->info
.ptr_info
= SSA_NAME_PTR_INFO (to
);
3404 /* Rather than allocating memory and unioning the info
3406 SSA_NAME_PTR_INFO (to
) = NULL
;
3411 VN_INFO (from
)->valnum
= to
;
3412 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3413 fprintf (dump_file
, " (changed)\n");
3416 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3417 fprintf (dump_file
, "\n");
/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
3475 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3479 valueized_wider_op (tree wide_type
, tree op
)
3481 if (TREE_CODE (op
) == SSA_NAME
)
3484 /* Either we have the op widened available. */
3487 tree tem
= vn_nary_op_lookup_pieces (1, NOP_EXPR
,
3488 wide_type
, ops
, NULL
);
3492 /* Or the op is truncated from some existing value. */
3493 if (TREE_CODE (op
) == SSA_NAME
)
3495 gimple
*def
= SSA_NAME_DEF_STMT (op
);
3496 if (is_gimple_assign (def
)
3497 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
3499 tem
= gimple_assign_rhs1 (def
);
3500 if (useless_type_conversion_p (wide_type
, TREE_TYPE (tem
)))
3502 if (TREE_CODE (tem
) == SSA_NAME
)
3503 tem
= SSA_VAL (tem
);
3509 /* For constants simply extend it. */
3510 if (TREE_CODE (op
) == INTEGER_CST
)
3511 return wide_int_to_tree (wide_type
, op
);
3516 /* Visit a nary operator RHS, value number it, and return true if the
3517 value number of LHS has changed as a result. */
3520 visit_nary_op (tree lhs
, gassign
*stmt
)
3522 tree result
= vn_nary_op_lookup_stmt (stmt
, NULL
);
3524 return set_ssa_val_to (lhs
, result
);
3526 /* Do some special pattern matching for redundancies of operations
3527 in different types. */
3528 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3529 tree type
= TREE_TYPE (lhs
);
3530 tree rhs1
= gimple_assign_rhs1 (stmt
);
3534 /* Match arithmetic done in a different type where we can easily
3535 substitute the result from some earlier sign-changed or widened
3537 if (INTEGRAL_TYPE_P (type
)
3538 && TREE_CODE (rhs1
) == SSA_NAME
3539 /* We only handle sign-changes or zero-extension -> & mask. */
3540 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1
))
3541 && TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (rhs1
)))
3542 || TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (rhs1
))))
3544 gassign
*def
= dyn_cast
<gassign
*> (SSA_NAME_DEF_STMT (rhs1
));
3546 && (gimple_assign_rhs_code (def
) == PLUS_EXPR
3547 || gimple_assign_rhs_code (def
) == MINUS_EXPR
3548 || gimple_assign_rhs_code (def
) == MULT_EXPR
))
3551 /* Either we have the op widened available. */
3552 ops
[0] = valueized_wider_op (type
,
3553 gimple_assign_rhs1 (def
));
3555 ops
[1] = valueized_wider_op (type
,
3556 gimple_assign_rhs2 (def
));
3557 if (ops
[0] && ops
[1])
3559 ops
[0] = vn_nary_op_lookup_pieces
3560 (2, gimple_assign_rhs_code (def
), type
, ops
, NULL
);
3561 /* We have wider operation available. */
3564 unsigned lhs_prec
= TYPE_PRECISION (type
);
3565 unsigned rhs_prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
3566 if (lhs_prec
== rhs_prec
)
3569 result
= vn_nary_build_or_lookup (NOP_EXPR
,
3573 bool changed
= set_ssa_val_to (lhs
, result
);
3574 vn_nary_op_insert_stmt (stmt
, result
);
3580 ops
[1] = wide_int_to_tree (type
,
3581 wi::mask (rhs_prec
, false,
3583 result
= vn_nary_build_or_lookup (BIT_AND_EXPR
,
3588 bool changed
= set_ssa_val_to (lhs
, result
);
3589 vn_nary_op_insert_stmt (stmt
, result
);
3600 bool changed
= set_ssa_val_to (lhs
, lhs
);
3601 vn_nary_op_insert_stmt (stmt
, lhs
);
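/* Illustrative example of the cross-type matching in visit_nary_op
   (types and SSA names are made up):

     unsigned short s_3 = a_1 + b_2;
     unsigned int   w_4 = (unsigned int) s_3;

   If an unsigned int addition of the zero-extended operands of s_3 is
   already available, w_4 can be value-numbered to that wider result
   masked with 0xffff (BIT_AND_EXPR) instead of a fresh conversion.  */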
3605 /* Visit a call STMT storing into LHS. Return true if the value number
3606 of the LHS has changed as a result. */
3609 visit_reference_op_call (tree lhs
, gcall
*stmt
)
3611 bool changed
= false;
3612 struct vn_reference_s vr1
;
3613 vn_reference_t vnresult
= NULL
;
3614 tree vdef
= gimple_vdef (stmt
);
3616 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3617 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
3620 vn_reference_lookup_call (stmt
, &vnresult
, &vr1
);
3623 if (vnresult
->result_vdef
&& vdef
)
3624 changed
|= set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3626 /* If the call was discovered to be pure or const reflect
3627 that as far as possible. */
3628 changed
|= set_ssa_val_to (vdef
, vuse_ssa_val (gimple_vuse (stmt
)));
3630 if (!vnresult
->result
&& lhs
)
3631 vnresult
->result
= lhs
;
3633 if (vnresult
->result
&& lhs
)
3634 changed
|= set_ssa_val_to (lhs
, vnresult
->result
);
3639 vn_reference_s
**slot
;
3640 tree vdef_val
= vdef
;
/* If we value-numbered an indirect function's callee to
   one not clobbering memory, value-number its VDEF to its
   VUSE.  */
3646 tree fn
= gimple_call_fn (stmt
);
3647 if (fn
&& TREE_CODE (fn
) == SSA_NAME
)
3650 if (TREE_CODE (fn
) == ADDR_EXPR
3651 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
3652 && (flags_from_decl_or_type (TREE_OPERAND (fn
, 0))
3653 & (ECF_CONST
| ECF_PURE
)))
3654 vdef_val
= vuse_ssa_val (gimple_vuse (stmt
));
3656 changed
|= set_ssa_val_to (vdef
, vdef_val
);
3659 changed
|= set_ssa_val_to (lhs
, lhs
);
3660 vr2
= current_info
->references_pool
->allocate ();
3661 vr2
->vuse
= vr1
.vuse
;
3662 /* As we are not walking the virtual operand chain we know the
3663 shared_lookup_references are still original so we can re-use
3665 vr2
->operands
= vr1
.operands
.copy ();
3666 vr2
->type
= vr1
.type
;
3668 vr2
->hashcode
= vr1
.hashcode
;
3670 vr2
->result_vdef
= vdef_val
;
3671 slot
= current_info
->references
->find_slot_with_hash (vr2
, vr2
->hashcode
,
3673 gcc_assert (!*slot
);
3680 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3681 and return true if the value number of the LHS has changed as a result. */
3684 visit_reference_op_load (tree lhs
, tree op
, gimple
*stmt
)
3686 bool changed
= false;
3690 last_vuse
= gimple_vuse (stmt
);
3691 last_vuse_ptr
= &last_vuse
;
3692 result
= vn_reference_lookup (op
, gimple_vuse (stmt
),
3693 default_vn_walk_kind
, NULL
, true);
3694 last_vuse_ptr
= NULL
;
3696 /* We handle type-punning through unions by value-numbering based
3697 on offset and size of the access. Be prepared to handle a
3698 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3700 && !useless_type_conversion_p (TREE_TYPE (result
), TREE_TYPE (op
)))
3702 /* We will be setting the value number of lhs to the value number
3703 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3704 So first simplify and lookup this expression to see if it
3705 is already available. */
3706 code_helper rcode
= VIEW_CONVERT_EXPR
;
3707 tree ops
[3] = { result
};
3708 result
= vn_nary_build_or_lookup (rcode
, TREE_TYPE (op
), ops
);
3712 changed
= set_ssa_val_to (lhs
, result
);
3715 changed
= set_ssa_val_to (lhs
, lhs
);
3716 vn_reference_insert (op
, lhs
, last_vuse
, NULL_TREE
);
3723 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3724 and return true if the value number of the LHS has changed as a result. */
3727 visit_reference_op_store (tree lhs
, tree op
, gimple
*stmt
)
3729 bool changed
= false;
3730 vn_reference_t vnresult
= NULL
;
3732 bool resultsame
= false;
3733 tree vuse
= gimple_vuse (stmt
);
3734 tree vdef
= gimple_vdef (stmt
);
3736 if (TREE_CODE (op
) == SSA_NAME
)
/* First we want to lookup using the *vuses* from the store and see
   if the last store to this location with the same address had the
   same value.
3743 The vuses represent the memory state before the store. If the
3744 memory state, address, and value of the store is the same as the
3745 last store to this location, then this store will produce the
3746 same memory state as that store.
3748 In this case the vdef versions for this store are value numbered to those
3749 vuse versions, since they represent the same memory state after
3752 Otherwise, the vdefs for the store are used when inserting into
3753 the table, since the store generates a new memory state. */
3755 vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, &vnresult
, false);
3757 && vnresult
->result
)
3759 tree result
= vnresult
->result
;
3760 if (TREE_CODE (result
) == SSA_NAME
)
3761 result
= SSA_VAL (result
);
3762 resultsame
= expressions_equal_p (result
, op
);
3765 /* If the TBAA state isn't compatible for downstream reads
3766 we cannot value-number the VDEFs the same. */
3767 alias_set_type set
= get_alias_set (lhs
);
3768 if (vnresult
->set
!= set
3769 && ! alias_set_subset_of (set
, vnresult
->set
))
3776 /* Only perform the following when being called from PRE
3777 which embeds tail merging. */
3778 if (default_vn_walk_kind
== VN_WALK
)
3780 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3781 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
, false);
3784 VN_INFO (vdef
)->use_processed
= true;
3785 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
3789 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3791 fprintf (dump_file
, "No store match\n");
3792 fprintf (dump_file
, "Value numbering store ");
3793 print_generic_expr (dump_file
, lhs
, 0);
3794 fprintf (dump_file
, " to ");
3795 print_generic_expr (dump_file
, op
, 0);
3796 fprintf (dump_file
, "\n");
3798 /* Have to set value numbers before insert, since insert is
3799 going to valueize the references in-place. */
3801 changed
|= set_ssa_val_to (vdef
, vdef
);
3803 /* Do not insert structure copies into the tables. */
3804 if (is_gimple_min_invariant (op
)
3805 || is_gimple_reg (op
))
3806 vn_reference_insert (lhs
, op
, vdef
, NULL
);
3808 /* Only perform the following when being called from PRE
3809 which embeds tail merging. */
3810 if (default_vn_walk_kind
== VN_WALK
)
3812 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
3813 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
3818 /* We had a match, so value number the vdef to have the value
3819 number of the vuse it came from. */
3821 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3822 fprintf (dump_file
, "Store matched earlier value, "
3823 "value numbering store vdefs to matching vuses.\n");
3825 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
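/* Illustrative example for the store handling above (schematic GIMPLE,
   not from a real dump):

     # .MEM_3 = VDEF <.MEM_1>
     *p_2 = x_4;
     ...
     # .MEM_7 = VDEF <.MEM_5>
     *p_2 = x_4;

   If the lookup in memory state .MEM_5 finds that the last store to
   *p_2 wrote the same value, .MEM_7 is value-numbered to the value of
   .MEM_5 and the second store becomes a removal candidate for users
   of the value numbers.  */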
3831 /* Visit and value number PHI, return true if the value number
3835 visit_phi (gimple
*phi
)
3837 bool changed
= false;
3839 tree sameval
= VN_TOP
;
3840 bool allsame
= true;
3841 unsigned n_executable
= 0;
3843 /* TODO: We could check for this in init_sccvn, and replace this
3844 with a gcc_assert. */
3845 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
3846 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
3848 /* See if all non-TOP arguments have the same value. TOP is
3849 equivalent to everything, so we can ignore it. */
3852 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
3853 if (e
->flags
& EDGE_EXECUTABLE
)
3855 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
3858 if (TREE_CODE (def
) == SSA_NAME
)
3859 def
= SSA_VAL (def
);
3862 if (sameval
== VN_TOP
)
3864 else if (!expressions_equal_p (def
, sameval
))
/* If none of the edges was executable or all incoming values are
   undefined keep the value-number at VN_TOP.  If only a single edge
   is executable use its value.  */
3874 if (sameval
== VN_TOP
3875 || n_executable
== 1)
3876 return set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3878 /* First see if it is equivalent to a phi node in this block. We prefer
3879 this as it allows IV elimination - see PRs 66502 and 67167. */
3880 result
= vn_phi_lookup (phi
);
3882 changed
= set_ssa_val_to (PHI_RESULT (phi
), result
);
3883 /* Otherwise all value numbered to the same value, the phi node has that
3886 changed
= set_ssa_val_to (PHI_RESULT (phi
), sameval
);
3889 vn_phi_insert (phi
, PHI_RESULT (phi
));
3890 changed
= set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
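/* Schematic summary of the visit_phi outcomes (made-up SSA names):

     x_3 = PHI <y_1(2), y_1(3)>       -> roughly, x_3 is valued as y_1
     x_3 = PHI <y_1(2), VN_TOP(back)> -> likewise, TOP arguments and
                                         non-executable edges are ignored
     x_3 = PHI <y_1(2), z_2(3)>       -> x_3 is valued as an equivalent
                                         PHI already hashed in this block,
                                         otherwise as itself.  */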
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
3921 /* Visit and value number USE, return true if the value number
3925 visit_use (tree use
)
3927 bool changed
= false;
3928 gimple
*stmt
= SSA_NAME_DEF_STMT (use
);
3930 mark_use_processed (use
);
3932 gcc_assert (!SSA_NAME_IN_FREE_LIST (use
));
3933 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
3934 && !SSA_NAME_IS_DEFAULT_DEF (use
))
3936 fprintf (dump_file
, "Value numbering ");
3937 print_generic_expr (dump_file
, use
, 0);
3938 fprintf (dump_file
, " stmt = ");
3939 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3942 /* Handle uninitialized uses. */
3943 if (SSA_NAME_IS_DEFAULT_DEF (use
))
3944 changed
= set_ssa_val_to (use
, use
);
3945 else if (gimple_code (stmt
) == GIMPLE_PHI
)
3946 changed
= visit_phi (stmt
);
3947 else if (gimple_has_volatile_ops (stmt
))
3948 changed
= defs_to_varying (stmt
);
3949 else if (gassign
*ass
= dyn_cast
<gassign
*> (stmt
))
3951 enum tree_code code
= gimple_assign_rhs_code (ass
);
3952 tree lhs
= gimple_assign_lhs (ass
);
3953 tree rhs1
= gimple_assign_rhs1 (ass
);
3956 /* Shortcut for copies. Simplifying copies is pointless,
3957 since we copy the expression and value they represent. */
3958 if (code
== SSA_NAME
3959 && TREE_CODE (lhs
) == SSA_NAME
)
3961 changed
= visit_copy (lhs
, rhs1
);
3964 simplified
= try_to_simplify (ass
);
3967 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3969 fprintf (dump_file
, "RHS ");
3970 print_gimple_expr (dump_file
, ass
, 0, 0);
3971 fprintf (dump_file
, " simplified to ");
3972 print_generic_expr (dump_file
, simplified
, 0);
3973 fprintf (dump_file
, "\n");
3976 /* Setting value numbers to constants will occasionally
3977 screw up phi congruence because constants are not
3978 uniquely associated with a single ssa name that can be
3981 && is_gimple_min_invariant (simplified
)
3982 && TREE_CODE (lhs
) == SSA_NAME
)
3984 changed
= set_ssa_val_to (lhs
, simplified
);
3988 && TREE_CODE (simplified
) == SSA_NAME
3989 && TREE_CODE (lhs
) == SSA_NAME
)
3991 changed
= visit_copy (lhs
, simplified
);
3995 if ((TREE_CODE (lhs
) == SSA_NAME
3996 /* We can substitute SSA_NAMEs that are live over
3997 abnormal edges with their constant value. */
3998 && !(gimple_assign_copy_p (ass
)
3999 && is_gimple_min_invariant (rhs1
))
4001 && is_gimple_min_invariant (simplified
))
4002 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
4003 /* Stores or copies from SSA_NAMEs that are live over
4004 abnormal edges are a problem. */
4005 || (code
== SSA_NAME
4006 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
4007 changed
= defs_to_varying (ass
);
4008 else if (REFERENCE_CLASS_P (lhs
)
4010 changed
= visit_reference_op_store (lhs
, rhs1
, ass
);
4011 else if (TREE_CODE (lhs
) == SSA_NAME
)
4013 if ((gimple_assign_copy_p (ass
)
4014 && is_gimple_min_invariant (rhs1
))
4016 && is_gimple_min_invariant (simplified
)))
4019 changed
= set_ssa_val_to (lhs
, simplified
);
4021 changed
= set_ssa_val_to (lhs
, rhs1
);
4025 /* Visit the original statement. */
4026 switch (vn_get_stmt_kind (ass
))
4029 changed
= visit_nary_op (lhs
, ass
);
4032 changed
= visit_reference_op_load (lhs
, rhs1
, ass
);
4035 changed
= defs_to_varying (ass
);
4041 changed
= defs_to_varying (ass
);
4043 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
4045 tree lhs
= gimple_call_lhs (call_stmt
);
4046 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
4048 /* Try constant folding based on our current lattice. */
4049 tree simplified
= gimple_fold_stmt_to_constant_1 (call_stmt
,
4053 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4055 fprintf (dump_file
, "call ");
4056 print_gimple_expr (dump_file
, call_stmt
, 0, 0);
4057 fprintf (dump_file
, " simplified to ");
4058 print_generic_expr (dump_file
, simplified
, 0);
4059 fprintf (dump_file
, "\n");
4062 /* Setting value numbers to constants will occasionally
4063 screw up phi congruence because constants are not
4064 uniquely associated with a single ssa name that can be
4067 && is_gimple_min_invariant (simplified
))
4069 changed
= set_ssa_val_to (lhs
, simplified
);
4070 if (gimple_vdef (call_stmt
))
4071 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
4072 SSA_VAL (gimple_vuse (call_stmt
)));
4076 && TREE_CODE (simplified
) == SSA_NAME
)
4078 changed
= visit_copy (lhs
, simplified
);
4079 if (gimple_vdef (call_stmt
))
4080 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
4081 SSA_VAL (gimple_vuse (call_stmt
)));
4084 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
4086 changed
= defs_to_varying (call_stmt
);
4091 /* Pick up flags from a devirtualization target. */
4092 tree fn
= gimple_call_fn (stmt
);
4093 int extra_fnflags
= 0;
4094 if (fn
&& TREE_CODE (fn
) == SSA_NAME
)
4097 if (TREE_CODE (fn
) == ADDR_EXPR
4098 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
)
4099 extra_fnflags
= flags_from_decl_or_type (TREE_OPERAND (fn
, 0));
4101 if (!gimple_call_internal_p (call_stmt
)
4102 && (/* Calls to the same function with the same vuse
4103 and the same operands do not necessarily return the same
4104 value, unless they're pure or const. */
4105 ((gimple_call_flags (call_stmt
) | extra_fnflags
)
4106 & (ECF_PURE
| ECF_CONST
))
4107 /* If calls have a vdef, subsequent calls won't have
4108 the same incoming vuse. So, if 2 calls with vdef have the
4109 same vuse, we know they're not subsequent.
4110 We can value number 2 calls to the same function with the
4111 same vuse and the same operands which are not subsequent
4112 the same, because there is no code in the program that can
4113 compare the 2 values... */
4114 || (gimple_vdef (call_stmt
)
4115 /* ... unless the call returns a pointer which does
4116 not alias with anything else. In which case the
4117 information that the values are distinct are encoded
4119 && !(gimple_call_return_flags (call_stmt
) & ERF_NOALIAS
)
4120 /* Only perform the following when being called from PRE
4121 which embeds tail merging. */
4122 && default_vn_walk_kind
== VN_WALK
)))
4123 changed
= visit_reference_op_call (lhs
, call_stmt
);
4125 changed
= defs_to_varying (call_stmt
);
4128 changed
= defs_to_varying (stmt
);
4133 /* Compare two operands by reverse postorder index */
4136 compare_ops (const void *pa
, const void *pb
)
4138 const tree opa
= *((const tree
*)pa
);
4139 const tree opb
= *((const tree
*)pb
);
4140 gimple
*opstmta
= SSA_NAME_DEF_STMT (opa
);
4141 gimple
*opstmtb
= SSA_NAME_DEF_STMT (opb
);
4145 if (gimple_nop_p (opstmta
) && gimple_nop_p (opstmtb
))
4146 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
4147 else if (gimple_nop_p (opstmta
))
4149 else if (gimple_nop_p (opstmtb
))
4152 bba
= gimple_bb (opstmta
);
4153 bbb
= gimple_bb (opstmtb
);
4156 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
4164 if (gimple_code (opstmta
) == GIMPLE_PHI
4165 && gimple_code (opstmtb
) == GIMPLE_PHI
)
4166 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
4167 else if (gimple_code (opstmta
) == GIMPLE_PHI
)
4169 else if (gimple_code (opstmtb
) == GIMPLE_PHI
)
4171 else if (gimple_uid (opstmta
) != gimple_uid (opstmtb
))
4172 return gimple_uid (opstmta
) - gimple_uid (opstmtb
);
4174 return SSA_NAME_VERSION (opa
) - SSA_NAME_VERSION (opb
);
4176 return rpo_numbers
[bba
->index
] - rpo_numbers
[bbb
->index
];
4179 /* Sort an array containing members of a strongly connected component
4180 SCC so that the members are ordered by RPO number.
4181 This means that when the sort is complete, iterating through the
4182 array will give you the members in RPO order. */
4185 sort_scc (vec
<tree
> scc
)
4187 scc
.qsort (compare_ops
);
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
4232 /* Process a strongly connected component in the SSA graph. */
4235 process_scc (vec
<tree
> scc
)
4239 unsigned int iterations
= 0;
4240 bool changed
= true;
4241 vn_nary_op_iterator_type hin
;
4242 vn_phi_iterator_type hip
;
4243 vn_reference_iterator_type hir
;
4248 /* If the SCC has a single member, just visit it. */
4249 if (scc
.length () == 1)
4252 if (VN_INFO (use
)->use_processed
)
4254 /* We need to make sure it doesn't form a cycle itself, which can
4255 happen for self-referential PHI nodes. In that case we would
4256 end up inserting an expression with VN_TOP operands into the
4257 valid table which makes us derive bogus equivalences later.
4258 The cheapest way to check this is to assume it for all PHI nodes. */
4259 if (gimple_code (SSA_NAME_DEF_STMT (use
)) == GIMPLE_PHI
)
4260 /* Fallthru to iteration. */ ;
4268 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4269 print_scc (dump_file
, scc
);
4271 /* Iterate over the SCC with the optimistic table until it stops
4273 current_info
= optimistic_info
;
4278 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4279 fprintf (dump_file
, "Starting iteration %d\n", iterations
);
4280 /* As we are value-numbering optimistically we have to
4281 clear the expression tables and the simplified expressions
4282 in each iteration until we converge. */
4283 optimistic_info
->nary
->empty ();
4284 optimistic_info
->phis
->empty ();
4285 optimistic_info
->references
->empty ();
4286 obstack_free (&optimistic_info
->nary_obstack
, NULL
);
4287 gcc_obstack_init (&optimistic_info
->nary_obstack
);
4288 optimistic_info
->phis_pool
->release ();
4289 optimistic_info
->references_pool
->release ();
4290 FOR_EACH_VEC_ELT (scc
, i
, var
)
4291 gcc_assert (!VN_INFO (var
)->needs_insertion
4292 && VN_INFO (var
)->expr
== NULL
);
4293 FOR_EACH_VEC_ELT (scc
, i
, var
)
4294 changed
|= visit_use (var
);
4297 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4298 fprintf (dump_file
, "Processing SCC needed %d iterations\n", iterations
);
4299 statistics_histogram_event (cfun
, "SCC iterations", iterations
);
4301 /* Finally, copy the contents of the no longer used optimistic
4302 table to the valid table. */
4303 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->nary
, nary
, vn_nary_op_t
, hin
)
4304 copy_nary (nary
, valid_info
);
4305 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->phis
, phi
, vn_phi_t
, hip
)
4306 copy_phi (phi
, valid_info
);
4307 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info
->references
,
4308 ref
, vn_reference_t
, hir
)
4309 copy_reference (ref
, valid_info
);
4311 current_info
= valid_info
;
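/* Rough sketch of the optimistic iteration above for a simple cycle
   (schematic, not from a real dump):

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   Starting from VN_TOP, the first iteration values i_1 and i_2 as 0;
   the second iteration reproduces the same values, the optimistic
   table stops changing, and its contents are copied into the valid
   table above.  */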
4315 /* Pop the components of the found SCC for NAME off the SCC stack
4316 and process them. Returns true if all went well, false if
4317 we run into resource limits. */
4320 extract_and_process_scc_for_name (tree name
)
4325 /* Found an SCC, pop the components off the SCC stack and
4329 x
= sccstack
.pop ();
4331 VN_INFO (x
)->on_sccstack
= false;
4333 } while (x
!= name
);
/* Drop all defs in the SCC to varying in case a SCC turns out to be
   incredibly large.
   ??? Just switch to a non-optimistic mode that avoids any iteration.  */
4338 if (scc
.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
))
4342 print_scc (dump_file
, scc
);
4343 fprintf (dump_file
, "WARNING: Giving up value-numbering SCC due to "
4344 "size %u exceeding %u\n", scc
.length (),
4345 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE
));
4349    FOR_EACH_VEC_ELT (scc, i, var)
4351        gimple *def = SSA_NAME_DEF_STMT (var);
4352        mark_use_processed (var);
4353        if (SSA_NAME_IS_DEFAULT_DEF (var)
4354            || gimple_code (def) == GIMPLE_PHI)
4355          set_ssa_val_to (var, var);
4357          defs_to_varying (def);
4362  if (scc.length () > 1)
4368/* Depth first search on NAME to discover and process SCC's in the SSA
4370   Execution of this algorithm relies on the fact that the SCC's are
4371   popped off the stack in topological order.
4372   Returns true if successful, false if we stopped processing SCC's due
4373   to resource constraints.  */
4378  auto_vec<ssa_op_iter> itervec;
4379  auto_vec<tree> namevec;
4380  use_operand_p usep = NULL;
4387  VN_INFO (name)->dfsnum = next_dfs_num++;
4388  VN_INFO (name)->visited = true;
4389  VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4391  sccstack.safe_push (name);
4392  VN_INFO (name)->on_sccstack = true;
4393  defstmt = SSA_NAME_DEF_STMT (name);
4395  /* Recursively DFS on our operands, looking for SCC's.  */
4396  if (!gimple_nop_p (defstmt))
4398      /* Push a new iterator.  */
4399      if (gphi *phi = dyn_cast <gphi *> (defstmt))
4400        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4402        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4405    clear_and_done_ssa_iter (&iter);
4409      /* If we are done processing uses of a name, go up the stack
4410         of iterators and process SCCs as we found them.  */
4411      if (op_iter_done (&iter))
4413          /* See if we found an SCC.  */
4414          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4415            extract_and_process_scc_for_name (name);
4417          /* Check if we are done.  */
4418          if (namevec.is_empty ())
4421          /* Restore the last use walker and continue walking there.  */
4423          name = namevec.pop ();
4424          memcpy (&iter, &itervec.last (),
4425                  sizeof (ssa_op_iter));
4427          goto continue_walking;
4430      use = USE_FROM_PTR (usep);
4432      /* Since we handle phi nodes, we will sometimes get
4433         invariants in the use expression.  */
4434      if (TREE_CODE (use) == SSA_NAME)
4436          if (! (VN_INFO (use)->visited))
4438              /* Recurse by pushing the current use walking state on
4439                 the stack and starting over.  */
4440              itervec.safe_push (iter);
4441              namevec.safe_push (name);
4446          VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4447                                     VN_INFO (use)->low);
4449          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4450              && VN_INFO (use)->on_sccstack)
4452            VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4453                                       VN_INFO (name)->low);
4457      usep = op_iter_next_use (&iter);
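/* The walk above is Tarjan's SCC algorithm made iterative by keeping the
   per-name use iterators and the names themselves on explicit stacks
   (ITERVEC and NAMEVEC).  The sketch below is illustrative only -- it is
   not used by the pass and every identifier in it is made up -- and shows
   the same dfsnum/low-link computation in its simpler recursive form on a
   tiny fixed graph.  SCCs are completed (popped) in topological order,
   operands before users, which is the property the processing above
   relies on.  */

namespace example_tarjan {

const int num_nodes = 4;
/* Edges: 0->1, 1->2, 2->1, 2->3.  Nodes 1 and 2 form the only
   non-trivial SCC.  */
const int num_succs[num_nodes] = { 1, 1, 2, 0 };
const int succs[num_nodes][2] = { {1, 0}, {2, 0}, {1, 3}, {0, 0} };

int dfsnum[num_nodes], low[num_nodes];
bool visited[num_nodes], on_stack[num_nodes];
int stack[num_nodes], sp, next_dfs;
int scc_roots[num_nodes], num_sccs;

void
strongconnect (int v)
{
  dfsnum[v] = low[v] = next_dfs++;
  visited[v] = true;
  stack[sp++] = v;
  on_stack[v] = true;

  for (int i = 0; i < num_succs[v]; i++)
    {
      int w = succs[v][i];
      if (!visited[w])
	{
	  /* Tree edge: recurse and merge the child's low-link.  */
	  strongconnect (w);
	  low[v] = low[v] < low[w] ? low[v] : low[w];
	}
      else if (on_stack[w] && dfsnum[w] < low[v])
	/* Back or cross edge into the current SCC candidate.  */
	low[v] = dfsnum[w];
    }

  /* V is the root of an SCC exactly when its low-link equals its own
     DFS number; pop the whole component off the stack.  */
  if (low[v] == dfsnum[v])
    {
      scc_roots[num_sccs++] = v;
      int w;
      do
	{
	  w = stack[--sp];
	  on_stack[w] = false;
	}
      while (w != v);
    }
}

} /* namespace example_tarjan */

/* Calling example_tarjan::strongconnect (0) on the graph above records the
   SCC roots in the order 3, 1, 0: the single-node SCC {3} completes before
   the cycle {1, 2}, which completes before {0}.  */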
4461  /* Allocate a value number table.  */
4464  allocate_vn_table (vn_tables_t table)
4466    table->phis = new vn_phi_table_type (23);
4467    table->nary = new vn_nary_op_table_type (23);
4468    table->references = new vn_reference_table_type (23);
4470    gcc_obstack_init (&table->nary_obstack);
4471    table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4472    table->references_pool = new object_allocator<vn_reference_s>
4476  /* Free a value number table.  */
4479  free_vn_table (vn_tables_t table)
4485    delete table->references;
4486    table->references = NULL;
4487    obstack_free (&table->nary_obstack, NULL);
4488    delete table->phis_pool;
4489    delete table->references_pool;
4496    int *rpo_numbers_temp;
4498    calculate_dominance_info (CDI_DOMINATORS);
4499    mark_dfs_back_edges ();
4501    sccstack.create (0);
4502    constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4504    constant_value_ids = BITMAP_ALLOC (NULL);
4509    vn_ssa_aux_table.create (num_ssa_names + 1);
4510    /* VEC_alloc doesn't actually grow it to the right size, it just
4511       preallocates the space to do so.  */
4512    vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4513    gcc_obstack_init (&vn_ssa_aux_obstack);
4515    shared_lookup_phiargs.create (0);
4516    shared_lookup_references.create (0);
4517    rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4519      XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4520    pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4522    /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4523       the i'th block in RPO order is bb.  We want to map bb's to RPO
4524       numbers, so we need to rearrange this array.  */
4525    for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4526      rpo_numbers[rpo_numbers_temp[j]] = j;
4528    XDELETE (rpo_numbers_temp);
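/* Concrete instance (illustrative): with rpo_numbers_temp[] = { 4, 2, 3 },
   i.e. bb4 is first in RPO, bb2 second and bb3 third, the loop above sets
   rpo_numbers[4] = 0, rpo_numbers[2] = 1 and rpo_numbers[3] = 2, so
   rpo_numbers[] can afterwards be indexed by basic block index to obtain
   the block's position in the RPO order.  */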
4530    VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4532    renumber_gimple_stmt_uids ();
4534    /* Create the valid and optimistic value numbering tables.  */
4535    valid_info = XCNEW (struct vn_tables_s);
4536    allocate_vn_table (valid_info);
4537    optimistic_info = XCNEW (struct vn_tables_s);
4538    allocate_vn_table (optimistic_info);
4539    current_info = valid_info;
4541    /* Create the VN_INFO structures, and initialize value numbers to
4542       TOP or VARYING for parameters.  */
4546    FOR_EACH_SSA_NAME (i, name, cfun)
4548        VN_INFO_GET (name)->valnum = VN_TOP;
4549        VN_INFO (name)->needs_insertion = false;
4550        VN_INFO (name)->expr = NULL;
4551        VN_INFO (name)->value_id = 0;
4553        if (!SSA_NAME_IS_DEFAULT_DEF (name))
4556        switch (TREE_CODE (SSA_NAME_VAR (name)))
4559            /* Undefined vars keep TOP.  */
4563            /* Parameters are VARYING but we can record a condition
4564               if we know it is a non-NULL pointer.  */
4565            VN_INFO (name)->visited = true;
4566            VN_INFO (name)->valnum = name;
4567            if (POINTER_TYPE_P (TREE_TYPE (name))
4568                && nonnull_arg_p (SSA_NAME_VAR (name)))
4572                ops[1] = build_int_cst (TREE_TYPE (name), 0);
4573                vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4574                                          boolean_true_node, 0);
4575                if (dump_file && (dump_flags & TDF_DETAILS))
4577                    fprintf (dump_file, "Recording ");
4578                    print_generic_expr (dump_file, name, TDF_SLIM);
4579                    fprintf (dump_file, " != 0\n");
4585        /* If the result is passed by invisible reference the default
4586           def is initialized, otherwise it's uninitialized.  */
4587        if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4589            VN_INFO (name)->visited = true;
4590            VN_INFO (name)->valnum = name;
4600  /* Restore SSA info that has been reset on value leaders.  */
4603  scc_vn_restore_ssa_info (void)
4608  FOR_EACH_SSA_NAME (i, name, cfun)
4610      if (has_VN_INFO (name))
4612          if (VN_INFO (name)->needs_insertion)
4614          else if (POINTER_TYPE_P (TREE_TYPE (name))
4615                   && VN_INFO (name)->info.ptr_info)
4616            SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4617          else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4618                   && VN_INFO (name)->info.range_info)
4620              SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4621              SSA_NAME_ANTI_RANGE_P (name)
4622                = VN_INFO (name)->range_info_anti_range_p;
4634  delete constant_to_value_id;
4635  constant_to_value_id = NULL;
4636  BITMAP_FREE (constant_value_ids);
4637  shared_lookup_phiargs.release ();
4638  shared_lookup_references.release ();
4639  XDELETEVEC (rpo_numbers);
4641  FOR_EACH_SSA_NAME (i, name, cfun)
4643      if (has_VN_INFO (name)
4644          && VN_INFO (name)->needs_insertion)
4645        release_ssa_name (name);
4647  obstack_free (&vn_ssa_aux_obstack, NULL);
4648  vn_ssa_aux_table.release ();
4650  sccstack.release ();
4651  free_vn_table (valid_info);
4652  XDELETE (valid_info);
4653  free_vn_table (optimistic_info);
4654  XDELETE (optimistic_info);
4656  BITMAP_FREE (const_parms);
4659  /* Set *ID according to RESULT.  */
4662  set_value_id_for_result (tree result, unsigned int *id)
4664    if (result && TREE_CODE (result) == SSA_NAME)
4665      *id = VN_INFO (result)->value_id;
4666    else if (result && is_gimple_min_invariant (result))
4667      *id = get_or_alloc_constant_value_id (result);
4669      *id = get_next_value_id ();
4672  /* Set the value ids in the valid hash tables.  */
4675  set_hashtable_value_ids (void)
4677    vn_nary_op_iterator_type hin;
4678    vn_phi_iterator_type hip;
4679    vn_reference_iterator_type hir;
4684    /* Now set the value ids of the things we had put in the hash
4685       tables.  */
4687    FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4688      set_value_id_for_result (vno->result, &vno->value_id);
4690    FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4691      set_value_id_for_result (vp->result, &vp->value_id);
4693    FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4695      set_value_id_for_result (vr->result, &vr->value_id);
4698  class sccvn_dom_walker : public dom_walker
4702      : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4704    virtual edge before_dom_children (basic_block);
4705    virtual void after_dom_children (basic_block);
4707    void record_cond (basic_block,
4708                      enum tree_code code, tree lhs, tree rhs, bool value);
4709    void record_conds (basic_block,
4710                       enum tree_code code, tree lhs, tree rhs, bool value);
4712    auto_vec<std::pair<basic_block, std::pair<vn_nary_op_t, vn_nary_op_t> > >
4716  /* Record a temporary condition for the BB and its dominated blocks.  */
4719  sccvn_dom_walker::record_cond (basic_block bb,
4720                                 enum tree_code code, tree lhs, tree rhs,
4723    tree ops[2] = { lhs, rhs };
4724    vn_nary_op_t old = NULL;
4725    if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4726      current_info->nary->remove_elt_with_hash (old, old->hashcode);
4727    vn_nary_op_t cond
4728      = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4729                                  value
4730                                  ? boolean_true_node
4731                                  : boolean_false_node, 0);
4732    if (dump_file && (dump_flags & TDF_DETAILS))
4734        fprintf (dump_file, "Recording temporarily ");
4735        print_generic_expr (dump_file, ops[0], TDF_SLIM);
4736        fprintf (dump_file, " %s ", get_tree_code_name (code));
4737        print_generic_expr (dump_file, ops[1], TDF_SLIM);
4738        fprintf (dump_file, " == %s%s\n",
4739                 value ? "true" : "false",
4740                 old ? " (old entry saved)" : "");
4742    cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4745  /* Record temporary conditions for the BB and its dominated blocks
4746     according to LHS CODE RHS == VALUE and its dominated conditions.  */
4749  sccvn_dom_walker::record_conds (basic_block bb,
4750                                  enum tree_code code, tree lhs, tree rhs,
4753    /* Record the original condition.  */
4754    record_cond (bb, code, lhs, rhs, value);
4759    /* Record dominated conditions if the condition is true.  Note that
4760       the inversion is already recorded.  */
4765        record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4766        record_cond (bb, NE_EXPR, lhs, rhs, true);
4767        record_cond (bb, EQ_EXPR, lhs, rhs, false);
4771        record_cond (bb, LE_EXPR, lhs, rhs, true);
4772        record_cond (bb, GE_EXPR, lhs, rhs, true);
4773        record_cond (bb, LT_EXPR, lhs, rhs, false);
4774        record_cond (bb, GT_EXPR, lhs, rhs, false);
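/* For example (illustrative), when BB is only reached over the true edge
   of "if (a < b)", the first group of calls above records a <= b == true,
   a != b == true and a == b == false in addition to the original
   a < b == true, so any of those comparisons appearing inside BB or its
   dominated blocks value-numbers to a constant.  The second group does the
   analogous derivations when a == b is known to be true.  */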
4782  /* Restore expressions and values derived from conditionals.  */
4785  sccvn_dom_walker::after_dom_children (basic_block bb)
4787    while (!cond_stack.is_empty ()
4788           && cond_stack.last ().first == bb)
4790        vn_nary_op_t cond = cond_stack.last ().second.first;
4791        vn_nary_op_t old = cond_stack.last ().second.second;
4792        current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4794          vn_nary_op_insert_into (old, current_info->nary, false);
4799  /* Value number all statements in BB.  */
4802  sccvn_dom_walker::before_dom_children (basic_block bb)
4807    if (dump_file && (dump_flags & TDF_DETAILS))
4808      fprintf (dump_file, "Visiting BB %d\n", bb->index);
4810    /* If we have a single predecessor record the equivalence from a
4811       possible condition on the predecessor edge.  */
4813    FOR_EACH_EDGE (e, ei, bb->preds)
4815        /* Ignore simple backedges from this to allow recording conditions
4816           in loop headers.  */
4817        if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4829    /* Check if there are multiple executable successor edges in
4830       the source block.  Otherwise there is no additional info
4831       to be recorded.  */
4833    FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4835          && e2->flags & EDGE_EXECUTABLE)
4837    if (e2 && (e2->flags & EDGE_EXECUTABLE))
4839        gimple *stmt = last_stmt (pred_e->src);
4841            && gimple_code (stmt) == GIMPLE_COND)
4843            enum tree_code code = gimple_cond_code (stmt);
4844            tree lhs = gimple_cond_lhs (stmt);
4845            tree rhs = gimple_cond_rhs (stmt);
4846            record_conds (bb, code, lhs, rhs,
4847                          (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4848            code = invert_tree_comparison (code, HONOR_NANS (lhs));
4849            if (code != ERROR_MARK)
4850              record_conds (bb, code, lhs, rhs,
4851                            (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4856    /* Value-number all defs in the basic-block.  */
4857    for (gphi_iterator gsi = gsi_start_phis (bb);
4858         !gsi_end_p (gsi); gsi_next (&gsi))
4860        gphi *phi = gsi.phi ();
4861        tree res = PHI_RESULT (phi);
4862        if (!VN_INFO (res)->visited)
4865    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4866         !gsi_end_p (gsi); gsi_next (&gsi))
4870        FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4871          if (!VN_INFO (op)->visited)
4875    /* Finally look at the last stmt.  */
4876    gimple *stmt = last_stmt (bb);
4880    enum gimple_code code = gimple_code (stmt);
4881    if (code != GIMPLE_COND
4882        && code != GIMPLE_SWITCH
4883        && code != GIMPLE_GOTO)
4886    if (dump_file && (dump_flags & TDF_DETAILS))
4888        fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4889        print_gimple_stmt (dump_file, stmt, 0, 0);
4892    /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4893       if value-numbering can prove they are not reachable.  Handling
4894       computed gotos is also possible.  */
4900          tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4901          tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4902          val = gimple_simplify (gimple_cond_code (stmt),
4903                                 boolean_type_node, lhs, rhs,
4905          /* If that didn't simplify to a constant see if we have recorded
4906             temporary expressions from taken edges.  */
4907          if (!val || TREE_CODE (val) != INTEGER_CST)
4912              val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4913                                              boolean_type_node, ops, NULL);
4918        val = gimple_switch_index (as_a <gswitch *> (stmt));
4921        val = gimple_goto_dest (stmt);
4929    edge taken = find_taken_edge (bb, vn_valueize (val));
4933    if (dump_file && (dump_flags & TDF_DETAILS))
4934      fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4935               "not executable\n", bb->index, bb->index, taken->dest->index);
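/* Illustrative example: if BB ends in "if (n_2 > 10)" and n_2 has been
   value-numbered to the constant 3, vn_valueize turns the condition into
   3 > 10, gimple_simplify folds that to false, find_taken_edge returns the
   false edge, and the other outgoing edges of BB are marked not executable
   so that, for instance, PHI arguments coming in over them are ignored by
   later value numbering.  */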
4940  /* Do SCCVN.  Returns true if it finished, false if we bailed out
4941     due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4942     how we use the alias oracle walking during the VN process.  */
4945  run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4949    default_vn_walk_kind = default_vn_walk_kind_;
4953    /* Collect pointers we know point to readonly memory.  */
4954    const_parms = BITMAP_ALLOC (NULL);
4955    tree fnspec = lookup_attribute ("fn spec",
4956                                    TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4959        fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4961        for (tree arg = DECL_ARGUMENTS (cfun->decl);
4962             arg; arg = DECL_CHAIN (arg), ++i)
4964            if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4966            if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4967                || TREE_STRING_POINTER (fnspec)[i] == 'r')
4969                tree name = ssa_default_def (cfun, arg);
4971                  bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4976    /* Walk all blocks in dominator order, value-numbering stmts
4977       SSA defs and decide whether outgoing edges are not executable.  */
4978    sccvn_dom_walker walker;
4979    walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4981    /* Initialize the value ids and prune out remaining VN_TOPs
4982       from dead code.  */
4985    FOR_EACH_SSA_NAME (i, name, cfun)
4987        vn_ssa_aux_t info = VN_INFO (name);
4989          info->valnum = name;
4990        if (info->valnum == name
4991            || info->valnum == VN_TOP)
4992          info->value_id = get_next_value_id ();
4993        else if (is_gimple_min_invariant (info->valnum))
4994          info->value_id = get_or_alloc_constant_value_id (info->valnum);
4998    FOR_EACH_SSA_NAME (i, name, cfun)
5000        vn_ssa_aux_t info = VN_INFO (name);
5001        if (TREE_CODE (info->valnum) == SSA_NAME
5002            && info->valnum != name
5003            && info->value_id != VN_INFO (info->valnum)->value_id)
5004          info->value_id = VN_INFO (info->valnum)->value_id;
5007    set_hashtable_value_ids ();
5009    if (dump_file && (dump_flags & TDF_DETAILS))
5011        fprintf (dump_file, "Value numbers:\n");
5012        FOR_EACH_SSA_NAME (i, name, cfun)
5014            if (VN_INFO (name)->visited
5015                && SSA_VAL (name) != name)
5017                print_generic_expr (dump_file, name, 0);
5018                fprintf (dump_file, " = ");
5019                print_generic_expr (dump_file, SSA_VAL (name), 0);
5020                fprintf (dump_file, "\n");
5026  /* Return the maximum value id we have ever seen.  */
5029  get_max_value_id (void)
5031    return next_value_id;
5034  /* Return the next unique value id.  */
5037  get_next_value_id (void)
5039    return next_value_id++;
5043  /* Compare two expressions E1 and E2 and return true if they are equal.  */
5046  expressions_equal_p (tree e1, tree e2)
5048    /* The obvious case.  */
5052    /* If either one is VN_TOP consider them equal.  */
5053    if (e1 == VN_TOP || e2 == VN_TOP)
5056    /* If only one of them is null, they cannot be equal.  */
5060    /* Now perform the actual comparison.  */
5061    if (TREE_CODE (e1) == TREE_CODE (e2)
5062        && operand_equal_p (e1, e2, OEP_PURE_SAME))
5069  /* Return true if the nary operation NARY may trap.  This is a copy
5070     of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5073  vn_nary_may_trap (vn_nary_op_t nary)
5076    tree rhs2 = NULL_TREE;
5077    bool honor_nans = false;
5078    bool honor_snans = false;
5079    bool fp_operation = false;
5080    bool honor_trapv = false;
5084    if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5085        || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5086        || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5089        fp_operation = FLOAT_TYPE_P (type);
5092            honor_nans = flag_trapping_math && !flag_finite_math_only;
5093            honor_snans = flag_signaling_nans != 0;
5095        else if (INTEGRAL_TYPE_P (type)
5096                 && TYPE_OVERFLOW_TRAPS (type))
5099    if (nary->length >= 2)
5101    ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5103                                         honor_nans, honor_snans, rhs2,
5109    for (i = 0; i < nary->length; ++i)
5110      if (tree_could_trap_p (nary->op[i]))
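/* Illustrative examples of what the checks above decide: an integer
   TRUNC_DIV_EXPR whose divisor is not a nonzero constant may trap
   (division by zero), a floating-point PLUS_EXPR may trap when
   -ftrapping-math is in effect, and a MEM_REF operand that may be out of
   bounds makes tree_could_trap_p return true.  Callers such as PRE use
   this predicate to avoid inserting such an expression on a path where it
   was not originally evaluated.  */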