/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   the same.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
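/* Illustrative sketch (an assumed example, not from the original
   sources): for a loop-carried cycle such as

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;

   the SCC walk optimistically assumes a value for i_1, derives one
   for i_2 in the optimistic table, and re-iterates the cycle members
   until the value numbers stabilize, here proving i_1 == i_2 == 0.  */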
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;
#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      tree tem = SSA_VAL (x);
      /* stmt walking can walk over a backedge and reach code we didn't
	 value-number yet.  */
      if (tem == VN_TOP)
	return x;
      x = tem;
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
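/* For illustration (assumed examples, not from the original sources):
     _1 = a_2 + 3;            kind VN_NARY
     _3 = MEM[(int *)p_4];    kind VN_REFERENCE
     x_5 = PHI <x_6, x_7>     kind VN_PHI
     _6 = &a;                 kind VN_CONSTANT when the address is
			      a gimple minimal invariant  */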
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  unsigned i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
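/* For illustration (an assumed example, not from the original sources):
   the offset-accumulating walks above allow a reference spelled as
   a.b[1] and one spelled as MEM[&a + 12] to compare equal when both
   flatten to the same base object and the same cumulative constant
   offset.  */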
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
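/* Illustrative example (assumed, not from the original sources): for
   the reference a.b[i_1] the resulting operand vector is roughly

     { ARRAY_REF <i_1>, COMPONENT_REF <b>, MEM_REF <off 0>, ADDR_EXPR <&a> }

   listed outermost access first, so references can be hashed and
   compared piecewise and can share common tails.  */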
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
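/* For illustration (an assumed example, not from the original sources):
   given

     p_1 = &a.b;
     ... = MEM[p_1 + 4];

   the address definition is forward-propagated so the reference becomes
   MEM[&a + <offset of b> + 4], exposing more equalities to the reference
   hashtable.  */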
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
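/* For illustration (an assumed example, not from the original sources):
   given

     static const int tab[2] = { 1, 2 };
     ... = tab[1];

   the read is resolved through the initializer's CONSTRUCTOR via
   fold_ctor_reference / native_interpret_expr to the constant 2.  */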
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
static unsigned mprts_hook_cnt;

/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
{
  if (!rcode.is_tree_code ())
    return NULL_TREE;
  tree *ops = ops_;
  unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
  if (rcode == CONSTRUCTOR
      /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (ops_[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (ops_[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
				       type, ops, &vnresult);
  /* We can end up endlessly recursing simplifications if the lookup above
     presents us with a def-use chain that mirrors the original simplification.
     See PR80887 for an example.  Limit successful lookup artificially
     to 10 times if we are called as mprts_hook.  */
  if (res
      && mprts_hook
      && --mprts_hook_cnt == 0)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Resetting mprts_hook after too many "
		 "invocations.\n");
      mprts_hook = NULL;
    }
  return res;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
			   bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  mprts_hook_cnt = 9;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) rcode))
    {
    case 1:
      res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (rcode, ops))
    /* The expression is already available.  */
    result = ops[0];
  else
    {
      tree val = vn_lookup_simplify_result (rcode, type, ops);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      VN_INFO_GET (result)->valnum = result;
      VN_INFO (result)->value_id = get_next_value_id ();
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      VN_INFO (result)->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (nary->result == NULL_TREE);
	  nary->result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else if (current_info == optimistic_info)
	{
	  current_info = valid_info;
	  vn_nary_op_insert_stmt (new_stmt, result);
	  current_info = optimistic_info;
	}
      else
	vn_nary_op_insert_stmt (new_stmt, result);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
{
  return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
}
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > 3)
    return NULL_TREE;
  tree ops[3];
  memcpy (ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool *disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti, maxsizei;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;

  /* If the reference is based on a parameter that was determined as
     pointing to readonly memory it doesn't change.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && bitmap_bit_p (const_parms,
		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
    {
      *disambiguate_only = true;
      return NULL;
    }

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    {
	      *disambiguate_only = true;
	      return NULL;
	    }
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}

      /* If we reach a clobbering statement try to skip it and see if
	 we find a VN result with exactly the same value as the
	 possible clobber.  In this case we can ignore the clobber
	 and return the found value.
	 Note that we don't need to worry about partial overlapping
	 accesses as we then can use TBAA to disambiguate against the
	 clobbering statement when looking up a load (thus the
	 VN_WALKREWRITE guard).  */
      if (vn_walk_kind == VN_WALKREWRITE
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && types_compatible_p (TREE_TYPE (lhs), vr->type))
	{
	  tree *saved_last_vuse_ptr = last_vuse_ptr;
	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
	  last_vuse_ptr = NULL;
	  tree saved_vuse = vr->vuse;
	  hashval_t saved_hashcode = vr->hashcode;
	  void *res = vn_reference_lookup_2 (ref,
					     gimple_vuse (def_stmt), 0, vr);
	  /* Need to restore vr->vuse and vr->hashcode.  */
	  vr->vuse = saved_vuse;
	  vr->hashcode = saved_hashcode;
	  last_vuse_ptr = saved_last_vuse_ptr;
	  if (res && res != (void *)-1)
	    {
	      vn_reference_t vnresult = (vn_reference_t) res;
	      if (vnresult->result
		  && operand_equal_p (vnresult->result,
				      gimple_assign_rhs1 (def_stmt), 0))
		return res;
	    }
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  tree val = vn_valueize (oldargs[i]);
	  if (val != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, val);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = true;
	      return NULL;
	    }
	}
    }

  if (*disambiguate_only)
    return (void *)-1;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (!ref->max_size_known_p ())
    return (void *)-1;

  poly_int64 offset = ref->offset;
  poly_int64 maxsize = ref->max_size;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && (integer_zerop (gimple_call_arg (def_stmt, 1))
	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
	      && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	      && offset.is_constant (&offseti)
	      && offseti % BITS_PER_UNIT == 0))
      && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
      && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      tree ref2 = gimple_call_arg (def_stmt, 0);
      if (TREE_CODE (ref2) == SSA_NAME)
	{
	  ref2 = SSA_VAL (ref2);
	  if (TREE_CODE (ref2) == SSA_NAME
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_OPERAND (base, 0) != ref2))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		ref2 = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (ref2) == ADDR_EXPR)
	{
	  ref2 = TREE_OPERAND (ref2, 0);
	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
					   &reverse);
	  if (!known_size_p (maxsize2)
	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
	    return (void *)-1;
	}
      else if (TREE_CODE (ref2) == SSA_NAME)
	{
	  poly_int64 soff;
	  if (TREE_CODE (base) != MEM_REF
	      || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
	    return (void *)-1;
	  offset += soff;
	  offset2 = 0;
	  if (TREE_OPERAND (base, 0) != ref2)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (ref2);
	      if (is_gimple_assign (def)
		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
		  && poly_int_tree_p (gimple_assign_rhs2 (def))
		  && (wi::to_poly_offset (gimple_assign_rhs2 (def))
		      << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
		{
		  ref2 = gimple_assign_rhs1 (def);
		  if (TREE_CODE (ref2) == SSA_NAME)
		    ref2 = SSA_VAL (ref2);
		}
	      else
		return (void *)-1;
	    }
	}
      else
	return (void *)-1;
      tree len = gimple_call_arg (def_stmt, 2);
      if (known_subrange_p (offset, maxsize, offset2,
			    wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val;
	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
	    val = build_zero_cst (vr->type);
	  else if (INTEGRAL_TYPE_P (vr->type)
		   && known_eq (ref->size, 8))
	    {
	      code_helper rcode = NOP_EXPR;
	      tree ops[3] = {};
	      ops[0] = gimple_call_arg (def_stmt, 1);
	      val = vn_nary_build_or_lookup (rcode, vr->type, ops);
	      if (!val
		  || (TREE_CODE (val) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return (void *)-1;
	    }
	  else
	    {
	      unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
	      unsigned char *buf = XALLOCAVEC (unsigned char, len);
	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
		      len);
	      val = native_interpret_expr (vr->type, buf, len);
	      if (!val)
		return (void *)-1;
	    }
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }
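  /* Illustrative example (assumed, not from the original sources):
     after

       memset (&a, 0, sizeof (a));
       x_1 = a.i;

     the load x_1 is covered by the memset and receives the value
     number of the constant 0.  */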
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2, &reverse);
      if (known_size_p (maxsize2)
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offset, maxsize, offset2, size2))
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }
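  /* Illustrative example (assumed, not from the original sources):
     after "a = (struct S){};" a load of a covered member such as a.i
     is value-numbered to the zero constant of its type.  */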
  /* 3) Assignment from a constant.  We can use fold's native
     encode/interpret routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && maxsizei % BITS_PER_UNIT == 0
	   && offset.is_constant (&offseti)
	   && offseti % BITS_PER_UNIT == 0
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant
			(SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2;
      bool reverse;
      base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &reverse);
      if (base2
	  && !reverse
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offseti, maxsizei, offset2, size2))
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = SSA_VAL (rhs);
	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer),
				    (offseti - offset2) / BITS_PER_UNIT);
	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
	    {
	      tree type = vr->type;
	      /* Make sure to interpret in a type that has a range
		 covering the whole access size.  */
	      if (INTEGRAL_TYPE_P (vr->type)
		  && maxsizei != TYPE_PRECISION (vr->type))
		type = build_nonstandard_integer_type (maxsizei,
						       TYPE_UNSIGNED (type));
	      tree val = native_interpret_expr (type, buffer,
						maxsizei / BITS_PER_UNIT);
	      /* If we chop off bits because the type's precision doesn't
		 match the memory access size this is ok when optimizing
		 reads but not when called from the DSE code during
		 elimination.  */
	      if (val
		  && type != vr->type)
		{
		  if (! int_fits_type_p (val, vr->type))
		    val = NULL_TREE;
		  else
		    val = fold_convert (vr->type, val);
		}

	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }
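  /* Illustrative example (assumed, not from the original sources):
     given

       union { int i; short s[2]; } u;
       u.i = 0x00010002;
       x_1 = u.s[0];

     the bits of 0x00010002 are encoded into a byte buffer and the
     subrange covering u.s[0] is re-interpreted as a short constant.  */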
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2,
				       &reverse);
      if (!reverse
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offset, maxsize, offset2, size2)
	  /* ??? We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (vr->type)
	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	  && multiple_p (ref->size, BITS_PER_UNIT))
	{
	  code_helper rcode = BIT_FIELD_REF;
	  tree ops[3];
	  ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
	  ops[1] = bitsize_int (ref->size);
	  ops[2] = bitsize_int (offset - offset2);
	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
	  if (val
	      && (TREE_CODE (val) != SSA_NAME
		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
	    {
	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
		  (vuse, vr->set, vr->type, vr->operands, val);
	      return res;
	    }
	}
    }
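  /* Illustrative example (assumed, not from the original sources):
     for

       v = w_1;        (v a struct or vector living in a register)
       x_2 = v.lo;

     the load is re-expressed as BIT_FIELD_REF <w_1, size, position>
     and value-numbered like any other nary operation.  */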
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2
	      || rhs[0].opcode != MEM_REF
	      || known_eq (rhs[0].off, -1))
	    return (void *)-1;
	  rhs[0].off += extra_off;
	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
					build_int_cst (TREE_TYPE (rhs[0].op0),
						       extra_off));
	}

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length ());
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
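
  /* Illustrative example for the case above: after an aggregate copy

	 b = a;
	 ... = b.x.y;

     the operands of the looked-up reference b.x.y are re-based on the
     copy's rhs, yielding a.x.y, and the VUSE walk continues with that
     translated reference (VN_WALKREWRITE only).  */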
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = SSA_VAL (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = SSA_VAL (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *) -1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
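
/* Illustrative example (hypothetical) for the memcpy case: for

       memcpy (&d, &s, 16);
       ... = d.f;		// fully contained in the copied 16 bytes

   the reference d.f is rewritten into a MEM_REF based on &s at the
   corresponding offset and the walk continues, analogous to the
   aggregate-copy case above.  */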
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      if (! tbaa_p)
	r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = current_info->references_pool->allocate ();
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = current_info->references_pool->allocate ();
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
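
/* The canonicalization above means, e.g. (illustrative SSA names),
   that  b_1 + a_2  hashes and compares equal to  a_2 + b_1, and that
   a_2 > b_1  is stored as  b_1 < a_2, so swapped commutative operands
   and swapped comparisons share a single hash table entry.  */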
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  /* While we do not want to insert things twice it's awkward to
     avoid it in the case where visit_nary_op pattern-matches stuff
     and ends up simplifying the replacement to itself.  We then
     get two inserts, one from visit_nary_op and one from
     vn_nary_build_or_lookup.
     So allow inserts with the same value number.  */
  if (*slot && (*slot)->result == vno->result)
    return *slot;

  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (vp1->phiargs.length () > 2
			? vp1->block->index : vp1->phiargs.length ());
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
		      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
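
/* For instance (illustrative):  a_1 < b_2  matches  b_2 > a_1
   directly, and matches  a_1 >= b_2  with *INVERTED_P set, modulo
   the HONOR_NANS checks above.  */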
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (vp1->phiargs.length () != vp2->phiargs.length ())
	return false;

      switch (vp1->phiargs.length ())
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned i;
  tree phi1op;
  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
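
/* Illustrative CFG for the two-argument case above: two separate
   diamonds whose controlling conditions both compute  a_1 < b_2  and
   whose PHIs merge  c_3  on the true edge and  d_4  on the false edge
   receive the same value number, which is what enables tail-merging
   in PRE.  */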
static vec<tree> shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi)
{
  vn_phi_s **slot;
  struct vn_phi_s vp1;
  edge e;
  edge_iterator ei;

  shared_lookup_phiargs.truncate (0);
  shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs[e->dest_idx] = def;
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1.cclhs = NULL_TREE;
  vp1.ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = current_info->phis_pool->allocate ();
  vec<tree> args = vNULL;
  edge e;
  edge_iterator ei;

  args.safe_grow (gimple_phi_num_args (phi));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  unsigned int i;
  tree var;

  fprintf (out, "SCC consists of %u:", scc.length ());
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var);
    }
  fprintf (out, "\n");
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (! prede)
	return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
	if (succe)
	  {
	    succe = NULL;
	    break;
	  }
	succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare us the dominator
	 check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
	{
	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
	    if (e != succe
		&& (e->flags & EDGE_EXECUTABLE))
	      {
		succe = NULL;
		break;
	      }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
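
/* Illustrative: if bb1's only executable predecessor is some bb3 and
   bb3 is dominated by bb2, then bb1 is treated as dominated by bb2
   here even though a not-executable edge into bb1 breaks the static
   dominance relation.  */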
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_assert (to != NULL_TREE
	      && ((TREE_CODE (to) == SSA_NAME
		   && (to == from || SSA_VAL (to) == to))
		  || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (currval != VN_TOP
	       && ! is_gimple_min_invariant (currval)
	       && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (currval, false)
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");

      /* If we equate two SSA names we have to make the side-band info
	 of the leader conservative (and remember whatever original value
	 was present).  */
      if (TREE_CODE (to) == SSA_NAME)
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
	      && SSA_NAME_RANGE_INFO (to))
	    {
	      if (SSA_NAME_IS_DEFAULT_DEF (to)
		  || dominated_by_p_w_unex
			(gimple_bb (SSA_NAME_DEF_STMT (from)),
			 gimple_bb (SSA_NAME_DEF_STMT (to))))
		/* Keep the info from the dominator.  */
		;
	      else
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.range_info)
		    {
		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
		      VN_INFO (to)->range_info_anti_range_p
			= SSA_NAME_ANTI_RANGE_P (to);
		    }
		  /* Rather than allocating memory and unioning the info
		     just clear it.  */
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "clearing range info of ");
		      print_generic_expr (dump_file, to);
		      fprintf (dump_file, "\n");
		    }
		  SSA_NAME_RANGE_INFO (to) = NULL;
		}
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (to))
		   && SSA_NAME_PTR_INFO (to))
	    {
	      if (SSA_NAME_IS_DEFAULT_DEF (to)
		  || dominated_by_p_w_unex
			(gimple_bb (SSA_NAME_DEF_STMT (from)),
			 gimple_bb (SSA_NAME_DEF_STMT (to))))
		/* Keep the info from the dominator.  */
		;
	      else if (! SSA_NAME_PTR_INFO (from)
		       /* Handle the case of trivially equivalent info.  */
		       || memcmp (SSA_NAME_PTR_INFO (to),
				  SSA_NAME_PTR_INFO (from),
				  sizeof (ptr_info_def)) != 0)
		{
		  /* Save old info.  */
		  if (! VN_INFO (to)->info.ptr_info)
		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
		  /* Rather than allocating memory and unioning the info
		     just clear it.  */
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "clearing points-to info of ");
		      print_generic_expr (dump_file, to);
		      fprintf (dump_file, "\n");
		    }
		  SSA_NAME_PTR_INFO (to) = NULL;
		}
	    }
	}

      VN_INFO (from)->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
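
/* A sketch of the lattice discipline the checks above implement:
   a name may move from VN_TOP to some value, and a non-constant
   value is never later refined to a constant - instead the name is
   forced to VARYING.  This monotonicity is what bounds the number of
   iterations of the optimistic SCC walk.  */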
/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = SSA_VAL (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes or zero-extension -> & mask.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type,
					   gimple_assign_rhs1 (def));
	      if (ops[0])
		ops[1] = valueized_wider_op (type,
					     gimple_assign_rhs2 (def));
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0])
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec)
			{
			  ops[1] = NULL_TREE;
			  result = vn_nary_build_or_lookup (NOP_EXPR,
							    type, ops);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  ops[1] = wide_int_to_tree (type,
						     wi::mask (rhs_prec, false,
							       lhs_prec));
			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
							    TREE_TYPE (lhs),
							    ops);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    default:;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
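
/* Illustrative example for the CASE_CONVERT pattern above (made-up
   SSA names, unsigned short operands): given

       _1 = (int) a_2;  _3 = (int) b_4;  _5 = _1 + _3;	// wide add
       s_6 = a_2 + b_4;  _7 = (int) s_6;		// narrow add

   the conversion _7 can re-use _5: directly for a sign-change of the
   same precision, or as  _5 & 0xffff  (BIT_AND_EXPR with the mask
   built above) for the zero-extension of the narrower type.  */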
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
	/* If the call was discovered to be pure or const reflect
	   that as far as possible.  */
	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect call's function to
	     one not clobbering memory value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE)))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							    INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
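
/* Type-punning illustration for the VIEW_CONVERT_EXPR handling above
   (hypothetical code): with

       union U { int i; float f; } u;
       u.i = _1;
       _2 = u.f;

   the load of u.f reads the same bytes as the store of _1, so _2 is
   value-numbered as VIEW_CONVERT_EXPR <float> (_1).  */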
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  alias_set_type set = get_alias_set (lhs);
	  if (vnresult->set != set
	      && ! alias_set_subset_of (set, vnresult->set))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (vnresult)
	    {
	      VN_INFO (vdef)->use_processed = true;
	      return set_ssa_val_to (vdef, vnresult->result_vdef);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
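
/* Illustrative example: in

       # .MEM_2 = VDEF <.MEM_1>
       *p_3 = x_4;
       ...
       # .MEM_5 = VDEF <.MEM_2>
       *p_3 = x_4;	// same address, same value, same memory state

   the second store does not change the memory state, so .MEM_5 is
   value-numbered to .MEM_2, which later allows the store to be
   eliminated as redundant.  */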
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple *phi)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  unsigned n_executable = 0;
  bool allsame = true;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	++n_executable;
	if (TREE_CODE (def) == SSA_NAME)
	  def = SSA_VAL (def);
	if (def == VN_TOP)
	  ;
	/* Ignore undefined defs for sameval but record one.  */
	else if (TREE_CODE (def) == SSA_NAME
		 && ssa_undefined_value_p (def, false))
	  seen_undef = def;
	else if (sameval == VN_TOP)
	  sameval = def;
	else if (!expressions_equal_p (def, sameval))
	  {
	    allsame = false;
	    break;
	  }
      }

  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi)))
    ;
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (allsame
	   && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
	 we mess up equivalences otherwise as we are only comparing
	 the immediate controlling predicates.  */
      vn_phi_insert (phi, result);
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
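
/* Illustrative example: for

       x_4 = PHI <y_1(3), y_1(4)>

   with both incoming edges executable, every argument value-numbers
   to y_1, so x_4 itself gets value number y_1 and the PHI degenerates
   to a copy.  */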
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
	      && !SSA_NAME_IS_DEFAULT_DEF (use));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if (!gimple_call_internal_p (call_stmt)
	  && (/* Calls to the same function with the same vuse
		 and the same operands do not necessarily return the same
		 value, unless they're pure or const.  */
	      ((gimple_call_flags (call_stmt) | extra_fnflags)
	       & (ECF_PURE | ECF_CONST))
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if 2 calls with vdef have the
		 same vuse, we know they're not subsequent.
		 We can value number 2 calls to the same function with the
		 same vuse and the same operands which are not subsequent
		 the same, because there is no code in the program that can
		 compare the 2 values...  */
	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In which case the
		     information that the values are distinct is encoded
		     in the IL.  */
		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		  /* Only perform the following when being called from PRE
		     which embeds tail merging.  */
		  && default_vn_walk_kind == VN_WALK)))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple *opstmta = SSA_NAME_DEF_STMT (opa);
  gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
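
/* For instance, in a loop

     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 1;

   the SCC { i_1, i_2 } is visited with the PHI first once sorted by
   RPO number, so each optimistic iteration over the SCC sees the
   value numbers computed in the current round as early as possible.  */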
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
	gcc_assert (!VN_INFO (var)->needs_insertion
		    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
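
/* Note the asymmetry above: a single-member SCC whose definition is
   not a PHI is visited once against the valid table, while anything
   containing a PHI - including the degenerate self-referential
   x_1 = PHI <x_1(loop), ...(entry)> - goes through the optimistic
   iteration, so VN_TOP operands cannot leak into the valid table.  */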
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  */

static void
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    }
  while (x != name);

  /* Drop all defs in the SCC to varying in case a SCC turns out to be
     incredibly large.
     ???  Just switch to a non-optimistic mode that avoids any iteration.  */
  if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	{
	  print_scc (dump_file, scc);
	  fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
		   "size %u exceeding %u\n", scc.length (),
		   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
	}

      tree var;
      unsigned i;
      FOR_EACH_VEC_ELT (scc, i, var)
	{
	  gimple *def = SSA_NAME_DEF_STMT (var);
	  mark_use_processed (var);
	  if (SSA_NAME_IS_DEFAULT_DEF (var)
	      || gimple_code (def) == GIMPLE_PHI)
	    set_ssa_val_to (var, var);
	  else
	    defs_to_varying (def);
	}
      return;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);
}
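
/* The size cutoff above is the user-visible --param sccvn-max-scc-size
   knob; an SCC larger than that is not iterated at all - its PHIs
   value-number to themselves and all other defs go to varying -
   trading optimization quality for compile time.  */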
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.  */

static void
DFS (tree name)
{
  auto_vec<ssa_op_iter> itervec;
  auto_vec<tree> namevec;
  use_operand_p usep = NULL;
  gimple *defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    extract_and_process_scc_for_name (name);

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    return;

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
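
/* This is Tarjan's SCC algorithm with the recursion turned into an
   explicit stack (NAMEVEC/ITERVEC) so that deep SSA use-def chains
   cannot overflow the host call stack.  NAME roots an SCC exactly
   when its low-link still equals its own dfsnum after all of its
   operands have been walked, which is when
   extract_and_process_scc_for_name fires above.  */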
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}
/* Initialize the value numbering machinery.  */

void
init_scc_vn (void)
{
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp
    = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);
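
  /* For example, if pre_and_rev_post_order_compute returned the block
     sequence { 2, 3, 5, 4 }, the loop above yields rpo_numbers[2] = 0,
     rpo_numbers[3] = 1, rpo_numbers[5] = 2 and rpo_numbers[4] = 3,
     i.e. a mapping from block index to position in the RPO sequence
     as consumed by compare_ops.  */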
  VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("VN_TOP"), error_mark_node);

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  current_info = valid_info;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  size_t i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      if (!SSA_NAME_IS_DEFAULT_DEF (name))
	continue;

      switch (TREE_CODE (SSA_NAME_VAR (name)))
	{
	case VAR_DECL:
	  /* All undefined vars are VARYING.  */
	  VN_INFO (name)->valnum = name;
	  VN_INFO (name)->visited = true;
	  break;

	case PARM_DECL:
	  /* Parameters are VARYING but we can record a condition
	     if we know it is a non-NULL pointer.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && nonnull_arg_p (SSA_NAME_VAR (name)))
	    {
	      tree ops[2];
	      ops[0] = name;
	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
					boolean_true_node, 0);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Recording ");
		  print_generic_expr (dump_file, name, TDF_SLIM);
		  fprintf (dump_file, " != 0\n");
		}
	    }
	  break;

	case RESULT_DECL:
	  /* If the result is passed by invisible reference the default
	     def is initialized, otherwise it's uninitialized.  Still
	     undefined is varying.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Restore SSA info that has been reset on value leaders.  */

void
scc_vn_restore_ssa_info (void)
{
  unsigned i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name))
	{
	  if (VN_INFO (name)->needs_insertion)
	    ;
	  else if (POINTER_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.ptr_info)
	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.range_info)
	    {
	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
	      SSA_NAME_ANTI_RANGE_P (name)
		= VN_INFO (name)->range_info_anti_range_p;
	    }
	}
    }
}
/* Free VN associated data structures.  */

void
free_scc_vn (void)
{
  unsigned i;
  tree name;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name)
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);

  BITMAP_FREE (const_parms);
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
class sccvn_dom_walker : public dom_walker
{
public:
  sccvn_dom_walker ()
    : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
		    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
		     enum tree_code code, tree lhs, tree rhs, bool value);

  auto_vec<std::pair<basic_block, std::pair<vn_nary_op_t, vn_nary_op_t> > >
    cond_stack;
};
/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
			       enum tree_code code, tree lhs, tree rhs,
			       bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
				value
				? boolean_true_node
				: boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
	       value ? "true" : "false",
	       old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}
/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
				enum tree_code code, tree lhs, tree rhs,
				bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}
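
/* So for a taken edge that tests a_1 < b_2 this records not only the
   condition itself but also a_1 <= b_2 == true, a_1 != b_2 == true
   and a_1 == b_2 == false, letting later lookups of those derived
   predicates hit the nary table directly.  */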
/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
	 && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
	vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}
/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge_iterator ei;
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited)
	DFS (res);
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited)
	  DFS (op);
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
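
/* Returning the taken edge tells the dominator walker (created with
   REACHABLE_BLOCKS, see domwalk.h) to clear EDGE_EXECUTABLE on the
   other outgoing edges and to skip blocks that thereby become
   unreachable, which is how statically decided conditions prune the
   rest of the walk.  */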
/* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
   oracle walking during the VN process.  */

void
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  unsigned i;
  tree name;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, "\n");
	    }
	}
    }
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
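
/* RHS2 is passed down so that, e.g., a division by a constant that
   the helper can prove non-zero is not considered trapping while a
   division by an SSA name of unknown value is.  */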
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  tree eliminate_avail (tree op);
  void eliminate_push_avail (tree op);
  tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
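
/* The previous leader (or OP itself if there was none) is saved on
   AVAIL_STACK so that after_dom_children below can pop entries until
   it reaches the NULL_TREE block marker and restore exactly the
   availability state the dominating block saw.  */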
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to something not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->preds)
    if (e->flags & EDGE_EXECUTABLE)
      break;
  if (! e)
    return NULL;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple *stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
	  && !gimple_has_volatile_ops (stmt)
	  /* See PR43491.  Do not replace a global register variable when
	     it is the RHS of an assignment.  Do replace local register
	     variables since gcc does not guarantee a local variable will
	     be allocated in register.
	     ???  The fix isn't effective here.  This should instead
	     be ensured by not value-numbering them the same but treating
	     them like volatiles?  */
	  && !(gimple_assign_single_p (stmt)
	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
		   && is_global_var (gimple_assign_rhs1 (stmt)))))
	{
	  sprime = eliminate_avail (lhs);
	  if (!sprime)
	    {
	      /* If there is no existing usable leader but SCCVN thinks
		 it has an expression it wants to use as replacement,
		 insert that.  */
	      tree val = VN_INFO (lhs)->valnum;
	      if (val != VN_TOP
		  && TREE_CODE (val) == SSA_NAME
		  && VN_INFO (val)->needs_insertion
		  && VN_INFO (val)->expr != NULL
		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
		eliminate_push_avail (sprime);
	    }

	  /* If this now constitutes a copy duplicate points-to
	     and range info appropriately.  This is especially
	     important for inserted code.  See tree-ssa-copy.c
	     for similar code.  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME)
	    {
	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && VN_INFO_PTR_INFO (lhs)
		  && ! VN_INFO_PTR_INFO (sprime))
		{
		  duplicate_ssa_name_ptr_info (sprime,
					       VN_INFO_PTR_INFO (lhs));
		  if (b != sprime_b)
		    mark_ptr_info_alignment_unknown
			(SSA_NAME_PTR_INFO (sprime));
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		       && VN_INFO_RANGE_INFO (lhs)
		       && ! VN_INFO_RANGE_INFO (sprime)
		       && b == sprime_b)
		duplicate_ssa_name_range_info (sprime,
					       VN_INFO_RANGE_TYPE (lhs),
					       VN_INFO_RANGE_INFO (lhs));
	    }

	  /* Inhibit the use of an inserted PHI on a loop header when
	     the address of the memory reference is a simple induction
	     variable.  In other cases the vectorizer won't do anything
	     anyway (either it's loop invariant or a complicated
	     expression).  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME
	      && do_pre
	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	      && loop_outer (b->loop_father)
	      && has_zero_uses (sprime)
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	      && gimple_assign_load_p (stmt))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	      basic_block def_bb = gimple_bb (def_stmt);
	      if (gimple_code (def_stmt) == GIMPLE_PHI
		  && def_bb->loop_father->header == def_bb)
		{
		  loop_p loop = def_bb->loop_father;
		  ssa_op_iter iter;
		  tree op;
		  bool found = false;
		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		    {
		      affine_iv iv;
		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		      if (def_bb
			  && flow_bb_inside_loop_p (loop, def_bb)
			  && simple_iv (loop, loop, op, &iv, true))
			{
			  found = true;
			  break;
			}
		    }
		  if (found)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Not replacing ");
			  print_gimple_expr (dump_file, stmt, 0);
			  fprintf (dump_file, " with ");
			  print_generic_expr (dump_file, sprime);
			  fprintf (dump_file, " which would add a loop"
				   " carried dependence to loop %d\n",
				   loop->num);
			}
		      /* Don't keep sprime available.  */
		      sprime = NULL_TREE;
		    }
		}
	    }

	  if (sprime)
	    {
	      /* If we can propagate the value computed for LHS into
		 all uses don't bother doing anything with this stmt.  */
	      if (may_propagate_copy (lhs, sprime))
		{
		  /* Mark it for removal.  */
		  to_remove.safe_push (stmt);

		  /* ???  Don't count copy/constant propagations.  */
		  if (gimple_assign_single_p (stmt)
		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
			  || gimple_assign_rhs1 (stmt) == sprime))
		    continue;

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " in all uses of ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  eliminations++;
		  continue;
		}

	      /* If this is an assignment from our leader (which
		 happens in the case the value-number is a constant)
		 then there is nothing to do.  */
	      if (gimple_assign_single_p (stmt)
		  && sprime == gimple_assign_rhs1 (stmt))
		continue;

	      /* Else replace its RHS.  */
	      bool can_make_abnormal_goto
		= is_gimple_call (stmt)
		  && stmt_can_make_abnormal_goto (stmt);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      gimple *orig_stmt = stmt;
	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
					      TREE_TYPE (sprime)))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      tree vdef = gimple_vdef (stmt);
	      tree vuse = gimple_vuse (stmt);
	      propagate_tree_value_into_stmt (&gsi, sprime);
	      stmt = gsi_stmt (gsi);

	      if (vdef != gimple_vdef (stmt))
		VN_INFO (vdef)->valnum = vuse;

	      /* If we removed EH side-effects from the statement, clean
		 its EH information.  */
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		{
		  bitmap_set_bit (need_eh_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed EH side-effects.\n");
		}

	      /* Likewise for AB side-effects.  */
	      if (can_make_abnormal_goto
		  && !stmt_can_make_abnormal_goto (stmt))
		{
		  bitmap_set_bit (need_ab_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed AB side-effects.\n");
		}

	      continue;
	    }
	}

      /* If the statement is a scalar store, see if the expression
	 has the same value number as its rhs.  If so, the store is
	 dead.  */
      if (gimple_assign_single_p (stmt)
	  && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg (gimple_assign_lhs (stmt))
	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	{
	  tree val;
	  tree rhs = gimple_assign_rhs1 (stmt);
	  vn_reference_t vnresult;
	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
				     &vnresult, false);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = VN_INFO (rhs)->valnum;
	  if (val
	      && operand_equal_p (val, rhs, 0))
	    {
	      /* We can only remove the later store if the former aliases
		 at least all accesses the later one does or if the store
		 was to readonly memory storing the same value.  */
	      alias_set_type set = get_alias_set (lhs);
	      if (! vnresult
		  || vnresult->set == set
		  || alias_set_subset_of (set, vnresult->set))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleted redundant store ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  /* Queue stmt for removal.  */
		  to_remove.safe_push (stmt);
		  continue;
		}
	    }
	}

      /* If this is a control statement value numbering left edges
	 unexecuted on force the condition in a way consistent with
	 that.  */
      if (gcond *cond = dyn_cast <gcond *> (stmt))
	{
	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Removing unexecutable edge from ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}
	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (cond);
	      else
		gimple_cond_make_false (cond);
	      el_todo |= TODO_cleanup_cfg;
	      continue;
	    }
	}

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
	 into all uses), replace all uses on this stmt with their
	 leaders.  */
      bool modified = false;
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  /* ???  The call code above leaves stmt operands un-updated.  */
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  tree sprime = eliminate_avail (use);
	  if (sprime && sprime != use
	      && may_propagate_copy (use, sprime)
	      /* We substitute into debug stmts to avoid excessive
		 debug temporaries created by removed stmts, but we need
		 to avoid doing so for inserted sprimes as we never want
		 to create debug temporaries for them.  */
	      && (!inserted_exprs
		  || TREE_CODE (sprime) != SSA_NAME
		  || !is_gimple_debug (stmt)
		  || !bitmap_bit_p (inserted_exprs,
				    SSA_NAME_VERSION (sprime))))
	    {
	      propagate_value (use_p, sprime);
	      modified = true;
	    }
	}

      /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
	 into which is a requirement for the IPA devirt machinery.  */
      gimple *old_stmt = stmt;
      if (modified)
	{
	  /* If a formerly non-invariant ADDR_EXPR is turned into an
	     invariant one it was on a separate stmt.  */
	  if (gimple_assign_single_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
	  gimple_stmt_iterator prev = gsi;
	  gsi_prev (&prev);
	  if (fold_stmt (&gsi))
	    {
	      /* fold_stmt may have created new stmts inbetween
		 the previous stmt and the folded stmt.  Mark
		 all defs created there as varying to not confuse
		 the SCCVN machinery as we're using that even during
		 elimination.  */
	      if (gsi_end_p (prev))
		prev = gsi_start_bb (b);
	      else
		gsi_next (&prev);
	      if (gsi_stmt (prev) != gsi_stmt (gsi))
		do
		  {
		    tree def;
		    ssa_op_iter dit;
		    FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					       dit, SSA_OP_ALL_DEFS)
		      /* As existing DEFs may move between stmts
			 we have to guard VN_INFO_GET.  */
		      if (! has_VN_INFO (def))
			VN_INFO_GET (def)->valnum = def;
		    if (gsi_stmt (prev) == gsi_stmt (gsi))
		      break;
		    gsi_next (&prev);
		  }
		while (1);
	    }
	  stmt = gsi_stmt (gsi);
	  /* In case we folded the stmt away schedule the NOP for removal.  */
	  if (gimple_nop_p (stmt))
	    to_remove.safe_push (stmt);
	}

      /* Visit indirect calls and turn them into direct calls if
	 possible using the devirtualization machinery.  Do this before
	 checking for required EH/abnormal/noreturn cleanup as devirt
	 may expose more of those.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  tree fn = gimple_call_fn (call_stmt);
	  if (fn
	      && flag_devirtualize
	      && virtual_method_call_p (fn))
	    {
	      tree otr_type = obj_type_ref_class (fn);
	      unsigned HOST_WIDE_INT otr_tok
		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	      tree instance;
	      ipa_polymorphic_call_context context (current_function_decl,
						    fn, stmt, &instance);
	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
					otr_type, stmt);
	      bool final;
	      vec <cgraph_node *> targets
		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
						     otr_tok, context, &final);
	      if (dump_file)
		dump_possible_polymorphic_call_targets (dump_file,
							obj_type_ref_class (fn),
							otr_tok, context);
	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
		{
		  tree fn;
		  if (targets.length () == 1)
		    fn = targets[0]->decl;
		  else
		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  if (dump_enabled_p ())
		    {
		      location_t loc = gimple_location (stmt);
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				       "converting indirect call to "
				       "function %s\n",
				       lang_hooks.decl_printable_name (fn, 2));
		    }
		  gimple_call_set_fndecl (call_stmt, fn);
		  /* If changing the call to __builtin_unreachable
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (call_stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
			  == void_type_node))
		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
		  maybe_remove_unused_call_args (cfun, call_stmt);
		  modified = true;
		}
	    }
	}

      if (modified)
	{
	  /* When changing a call into a noreturn call, cfg cleanup
	     is needed to fix up the noreturn call.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	    to_fixup.safe_push (stmt);
	  /* When changing a condition or switch into one we know what
	     edge will be executed, schedule a cfg cleanup.  */
	  if ((gimple_code (stmt) == GIMPLE_COND
	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
	      || (gimple_code (stmt) == GIMPLE_SWITCH
		  && TREE_CODE (gimple_switch_index
				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
	    el_todo |= TODO_cleanup_cfg;
	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }
	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }
	  update_stmt (stmt);
	  if (vdef != gimple_vdef (stmt))
	    VN_INFO (vdef)->valnum = vuse;
	}

      /* Make new values available - for fully redundant LHS we
	 continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}
  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Eliminate fully redundant computations.  */

unsigned int
vn_eliminate (bitmap inserted_exprs)
{
  eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
  el.avail.reserve (num_ssa_names);

  el.walk (cfun->cfg->x_entry_block_ptr);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el.to_remove.is_empty ())
    {
      gimple *stmt = el.to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (el.need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (el.need_ab_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el.el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el.to_fixup.is_empty ())
    {
      gimple *stmt = el.to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el.el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el.el_todo |= TODO_cleanup_cfg;

  statistics_counter_event (cfun, "Eliminated", el.eliminations);
  statistics_counter_event (cfun, "Insertions", el.insertions);

  return el.el_todo;
}
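
/* vn_eliminate is used in two modes: PRE passes its bitmap of
   inserted expressions, enabling the insertion-aware logic above,
   while plain FRE (see pass_fre::execute below) passes NULL.  */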
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
unsigned int
pass_fre::execute (function *)
{
  unsigned int todo = 0;

  run_scc_vn (VN_WALKREWRITE);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (NULL);

  scc_vn_restore_ssa_info ();
  free_scc_vn ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}