/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "insn-config.h"
#include "alloc-pool.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-iterator.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
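
/* A rough illustration of alternative 2 above, not part of the original
   description: for a simple induction cycle such as

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the SCC {i_1, i_2} is value numbered using the optimistic hashtable
   and re-iterated only until the value numbers of its members stop
   changing; names feeding the cycle from outside already have final
   value numbers when the SCC is popped off the stack.  */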
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            vn_valueize (TREE_OPERAND
                                           (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)),
                          vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  unsigned i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            hstate.add_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
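
/* The vuse is folded in above by plain addition rather than by mixing it
   into the hash state, so a lookup can later be re-targeted to another
   vuse cheaply; vn_reference_lookup_2 below recomputes

     hashcode = hashcode - SSA_NAME_VERSION (old vuse)
                         + SSA_NAME_VERSION (new vuse)

   without rehashing all of the operands.  */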
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
            temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    offset_int off
                      = (wi::to_offset (this_offset)
                         + wi::lrshift (wi::to_offset (bit_offset),
                                        LOG2_BITS_PER_UNIT));
                    if (wi::fits_shwi_p (off)
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                            trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || off != 0
                            || cfun->after_inlining))
                      temp.off = off.to_shwi ();
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              offset_int off = ((wi::to_offset (temp.op0)
                                 - wi::to_offset (temp.op1))
                                * wi::to_offset (temp.op2));
              if (wi::fits_shwi_p (off))
                temp.off = off.to_shwi ();
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          break;
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
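
/* A rough example of the resulting operand vector, not from the original
   sources: for a load from a.x[i_1], with a a local declaration, the ops
   are pushed outermost-first, roughly

     { ARRAY_REF     op0 = i_1, op1 = low bound, op2 = element size,
       COMPONENT_REF op0 = FIELD_DECL x,
       MEM_REF       op0 = 0,
       ADDR_EXPR     op0 = &a }

   with the declaration canonicalized to MEM[&a] as described above.  */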
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
        size = -1;
      else
        size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_to_shwi (op->op1);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
              max_size = -1;
            else
              {
                offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!tree_fits_shwi_p (op->op0)
              || !tree_fits_shwi_p (op->op1)
              || !tree_fits_shwi_p (op->op2))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
              hindex -= tree_to_shwi (op->op1);
              hindex *= tree_to_shwi (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
        mem_op->off = tree_to_shwi (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
          && *i_p == ops->length () - 1
          && off == 0
          /* This makes us disable this transform for PRE where the
             reference ops might be also used for code insertion which
             is invalid.  */
          && default_vn_walk_kind == VN_WALKREWRITE)
        {
          auto_vec<vn_reference_op_s, 32> tem;
          copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
          ops->pop ();
          ops->pop ();
          ops->safe_splice (tem);
          --*i_p;
          return;
        }
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
           && is_gimple_reg_type (ref->type)
           && (!INTEGRAL_TYPE_P (ref->type)
               || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
        size = TYPE_PRECISION (ref->type);
      else
        size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
          || size > MAX_BITSIZE_MODE_ANY_MODE)
        return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
        {
          if (operands[i].off == -1)
            return NULL_TREE;
          off += operands[i].off;
          if (operands[i].opcode == MEM_REF)
            {
              ++i;
              break;
            }
        }
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
        ctor = base->op0;
      else if (base->opcode == MEM_REF
               && base[1].opcode == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
                   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
        {
          decl = TREE_OPERAND (base[1].op0, 0);
          ctor = ctor_for_folding (decl);
        }
      if (ctor == NULL_TREE)
        return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
        {
          if (decl)
            {
              tree res = fold_ctor_reference (ref->type, ctor,
                                              off * BITS_PER_UNIT,
                                              size * BITS_PER_UNIT, decl);
              if (res)
                {
                  STRIP_USELESS_TYPE_CONVERSION (res);
                  if (is_gimple_min_invariant (res))
                    return res;
                }
            }
          else
            {
              unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
              if (native_encode_expr (ctor, buf, size, off) > 0)
                return native_interpret_expr (ref->type, buf, size);
            }
        }
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          offset_int off = ((wi::to_offset (vro->op0)
                             - wi::to_offset (vro->op1))
                            * wi::to_offset (vro->op2));
          if (wi::fits_shwi_p (off))
            vro->off = off.to_shwi ();
        }
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
                       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;

  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
                       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
           && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
         alias oracle again.  Valueization may improve points-to
         info of pointers and constify size and position arguments.
         Originally this was motivated by PR61034 which has
         conditional calls to free falsely clobbering ref because
         of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
        {
          oldargs[i] = gimple_call_arg (def_stmt, i);
          if (TREE_CODE (oldargs[i]) == SSA_NAME
              && VN_INFO (oldargs[i])->valnum != oldargs[i])
            {
              gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
              valueized_anything = true;
            }
        }
      if (valueized_anything)
        {
          bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
                                               ref);
          for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
            gimple_call_set_arg (def_stmt, i, oldargs[i]);
          if (!res)
            return NULL;
        }
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
            (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
            (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                  (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                  (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2
              && (TREE_CODE (base) != MEM_REF
                  || TREE_CODE (base2) != MEM_REF
                  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
                  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
                                          TREE_OPERAND (base2, 1))))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      HOST_WIDE_INT extra_off = 0;
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && lhs_ops[0].off != -1)
        {
          if (lhs_ops[0].off == vr->operands[i].off)
            i--, j--;
          else if (vr->operands[i].opcode == MEM_REF
                   && vr->operands[i].off != -1)
            {
              extra_off = vr->operands[i].off - lhs_ops[0].off;
              i--, j--;
            }
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (extra_off != 0)
        {
          if (rhs.length () < 2
              || rhs[0].opcode != MEM_REF
              || rhs[0].off == -1)
            return (void *)-1;
          rhs[0].off += extra_off;
          rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
                                        build_int_cst (TREE_TYPE (rhs[0].op0),
                                                       extra_off));
        }

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vr->operands.safe_grow (i + 1 + rhs.length ());
          if (old == shared_lookup_references)
            shared_lookup_references = vr->operands;
        }
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
        shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
        return vn_reference_lookup_or_insert_for_pieces
          (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          lhs = SSA_VAL (lhs);
          if (TREE_CODE (lhs) == SSA_NAME)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
              if (gimple_assign_single_p (def_stmt)
                  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
                lhs = gimple_assign_rhs1 (def_stmt);
            }
        }
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
            {
              lhs = TREE_OPERAND (tem, 0);
              if (TREE_CODE (lhs) == SSA_NAME)
                lhs = SSA_VAL (lhs);
              lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += tree_to_uhwi (TREE_OPERAND (base, 1));
      /* If the access is completely outside of the memcpy destination
         area there is no aliasing.  */
      if (lhs_offset >= at + maxsize / BITS_PER_UNIT
          || lhs_offset + copy_size <= at)
        return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = vr->operands;
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
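
/* A rough illustration of case 5) above, not from the original sources:
   given

     a = b;        <-- aggregate copy, the definition of the walked vuse
     ... = a.f;    <-- the reference being looked up

   the reference ops for a.f are rewritten to name b.f and the walk
   continues, so an earlier store to b.f (or a constant initializer of b)
   can still provide the value.  */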
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3,
                                                  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3,
                                                vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
                          vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
  vn_reference_s **slot;

  vr1 = current_info->references_pool->allocate ();
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)

  /* But free the old slot in case of a collision.  */
    free_reference (*slot);
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            tree result, unsigned int value_id)
  vn_reference_s **slot;

  vr1 = current_info->references_pool->allocate ();
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
  gcc_assert (!*slot);
    free_reference (*slot);
/* Compute and return the hash value for nary operation VNO1.  */

vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  inchash::hash hstate;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
        && commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
           && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
           && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
/* Compare nary operations VNO1 and VNO2 and return true if they are

vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
  if (vno1->hashcode != vno2->hashcode)

  if (vno1->length != vno2->length)

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
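
/* Illustrative note (for exposition, not part of the original sources):
   the hash and equality routines above rely on the operand
   canonicalization done in vn_nary_op_compute_hash.  Because PLUS_EXPR
   is commutative, the two GIMPLE right-hand sides

     a_1 + b_2   and   b_2 + a_1

   are put into the same canonical operand order before hashing, so they
   receive the same hashcode, compare equal via vn_nary_op_eq, and
   therefore share one value number.  */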
/* Initialize VNO from the pieces provided.  */

init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree *ops)
  vno->length = length;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
/* Initialize VNO from OP.  */

init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
/* Return the number of operands for a vn_nary ops structure from STMT.  */

vn_nary_length_from_stmt (gimple stmt)
  switch (gimple_assign_rhs_code (stmt))

    case VIEW_CONVERT_EXPR:

      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

      return gimple_num_ops (stmt) - 1;
/* Initialize VNO from STMT.  */

init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)

    case VIEW_CONVERT_EXPR:
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);

      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);

      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;

      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = gimple_op (stmt, i + 1);
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
  vn_nary_op_s **slot;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,

  return (*slot)->result;
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable

vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops, vn_nary_op_t *vnresult)
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
                                  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable

vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
    = XALLOCAVAR (struct vn_nary_op_s,
                  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's

alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
  vn_nary_op_s **slot;

    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in

vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in

vn_nary_op_insert (tree op, tree result)
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);

/* Insert the rhs of STMT into the current hash table with a value number of

vn_nary_op_insert_stmt (gimple stmt, tree result)
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
  inchash::hash hstate (vp1->block->index);

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
    hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
      if (phi1op == VN_TOP)
      inchash::add_expr (phi1op, hstate);

  return hstate.end ();
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
  if (vp1->hashcode != vp2->hashcode)

  if (vp1->block == vp2->block)

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
          tree phi2op = vp2->phiargs[i];
          if (phi1op == VN_TOP || phi2op == VN_TOP)
          if (!expressions_equal_p (phi1op, phi2op))

static vec<tree> shared_lookup_phiargs;
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_phi_lookup (gimple phi)
  struct vn_phi_s vp1;

  shared_lookup_phiargs.truncate (0);

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs.safe_push (def);
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,

  return (*slot)->result;
/* Insert PHI into the current hash table with a value number of

vn_phi_insert (gimple phi, tree result)
  vn_phi_t vp1 = current_info->phis_pool->allocate ();
  vec<tree> args = vNULL;

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args.safe_push (def);
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
/* Print set of components in strongly connected component SCC to OUT.  */

print_scc (FILE *out, vec<tree> scc)
  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
      print_generic_expr (out, var, 0);
  fprintf (out, "\n");
/* Set the value number of FROM to TO, return true if it has changed

set_ssa_val_to (tree from, tree to)
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");

  gcc_assert (to != NULL_TREE
              && ((TREE_CODE (to) == SSA_NAME
                   && (to == from || SSA_VAL (to) == to))
                  || is_gimple_min_invariant (to)));

  if (currval == from)
      if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Not changing value number of ");
          print_generic_expr (dump_file, from, 0);
          fprintf (dump_file, " from VARYING to ");
          print_generic_expr (dump_file, to, 0);
          fprintf (dump_file, "\n");
  else if (TREE_CODE (to) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);

      && !operand_equal_p (currval, to, 0)
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\n");
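
/* Illustrative note (for exposition, not part of the original sources):
   during optimistic iteration of an SCC the value number of a name may be
   refined several times.  For a loop PHI such as

     i_1 = PHI <0(preheader), i_2(latch)>

   i_1 may first be numbered to the constant 0 and, if a later iteration
   shows the latch value differs, be renumbered.  The boolean returned here
   is what keeps process_scc iterating until no value number changes; once a
   name has been numbered to VARYING it is not changed back.  */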
/* Mark as processed all the definitions in the defining stmt of USE, or

mark_use_processed (tree use)
  gimple stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
      VN_INFO (use)->use_processed = true;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
      tree def = DEF_FROM_PTR (defp);
      VN_INFO (def)->use_processed = true;

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

defs_to_varying (gimple stmt)
  bool changed = false;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);

static bool expr_has_constants (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number

visit_copy (tree lhs, tree rhs)
  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  /* And finally valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

visit_nary_op (tree lhs, gimple stmt)
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

    changed = set_ssa_val_to (lhs, result);
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
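
/* Illustrative example (not part of the original file): visit_nary_op is the
   usual lookup-or-insert step of hash-based value numbering.  Given

     x_2 = a_1 + 4;
     ...
     y_3 = a_1 + 4;

   visiting x_2 inserts "a_1 + 4 -> x_2" into the nary table, and visiting
   y_3 finds that entry and value-numbers y_3 to x_2.  */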
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

visit_reference_op_call (tree lhs, gcall *stmt)
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
          changed |= set_ssa_val_to (lhs, vnresult->result);

          if (VN_INFO (vnresult->result)->has_constants)
            VN_INFO (lhs)->has_constants = true;

      vn_reference_s **slot;
        changed |= set_ssa_val_to (vdef, vdef);
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->hashcode = vr1.hashcode;
      vr2->result_vdef = vdef;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
      gcc_assert (!*slot);
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_load (tree lhs, tree op, gimple stmt)
  bool changed = false;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
          tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
          result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");

      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;

      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_store (tree lhs, tree op, gimple stmt)
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);

  if ((!result || !resultsame)
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      && default_vn_walk_kind == VN_WALK)
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
          VN_INFO (vdef)->use_processed = true;
          return set_ssa_val_to (vdef, vnresult->result_vdef);

  if (!result || !resultsame)
      if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
          changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);

      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value,"
                 "value numbering store vdefs to matching vuses.\n");

        changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
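
/* Illustrative example (not part of the original code): for two stores of
   the same value to the same location,

     # .MEM_3 = VDEF <.MEM_1>
     a.x = t_2;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     a.x = t_2;

   the second store finds that the value already stored at a.x under its
   incoming memory state equals t_2, so its vdef .MEM_5 is value-numbered to
   the vuse .MEM_3 instead of representing a new memory state.  */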
/* Visit and value number PHI, return true if the value number

visit_phi (gimple phi)
  bool changed = false;
  tree sameval = VN_TOP;
  bool allsame = true;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        if (TREE_CODE (def) == SSA_NAME)
          def = SSA_VAL (def);
        if (sameval == VN_TOP)
            if (!expressions_equal_p (def, sameval))

  /* If all value numbered to the same value, the phi node has that
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
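
/* Illustrative example (not from the original file): for

     x_4 = PHI <a_1(3), a_1(4)>

   both executable incoming arguments value-number to a_1, so x_4 is
   value-numbered to a_1.  If the arguments differ, the PHI is instead
   looked up in (and possibly inserted into) the phi hash table so that
   structurally identical PHIs in the same block share a value number.  */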
/* Return true if EXPR contains constants.  */

expr_has_constants (tree expr)
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
             || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
    /* Constants inside reference ops are rarely interesting, but
       it can take a lot of looking to find them.  */
    case tcc_declaration:
      return is_gimple_min_invariant (expr);
/* Return true if STMT contains constants.  */

stmt_has_constants (gimple stmt)
  if (gimple_code (stmt) != GIMPLE_ASSIGN)

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    case GIMPLE_TERNARY_RHS:
      tem = gimple_assign_rhs3 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))

    case GIMPLE_BINARY_RHS:
      tem = gimple_assign_rhs2 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))

    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case GIMPLE_UNARY_RHS:
      tem = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
        tem = SSA_VAL (tem);
      return is_gimple_min_invariant (tem);
/* Simplify the binary expression RHS, and return the result if

simplify_binary_expression (gimple stmt)
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME
      && (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (code) == tcc_comparison
          || code == COMPLEX_EXPR))
    op0 = vn_get_expr_for (op0);

  op1 = vn_valueize (op1);
  if (TREE_CODE (op1) == SSA_NAME
      && (VN_INFO (op1)->has_constants
          || code == COMPLEX_EXPR))
    op1 = vn_get_expr_for (op1);

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && tree_fits_uhwi_p (op1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
                                    TREE_OPERAND (op0, 0),
                                    tree_to_uhwi (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))

  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
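
/* Illustrative example (for exposition only): if the lattice records
   VN_INFO (b_2)->expr == a_1 + 3, then for

     c_4 = b_2 + 5;

   op0 is expanded to the constant-containing expression a_1 + 3 and
   fold_binary can combine the two constants, giving a_1 + 8, without
   rewriting b_2's own defining statement.  */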
/* Simplify the unary expression RHS, and return the result if

simplify_unary_expression (gassign *stmt)
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME)
      if (VN_INFO (op0)->has_constants)
        op0 = vn_get_expr_for (op0);
      else if (CONVERT_EXPR_CODE_P (code)
               || code == REALPART_EXPR
               || code == IMAGPART_EXPR
               || code == VIEW_CONVERT_EXPR
               || code == BIT_FIELD_REF)
          /* We want to do tree-combining on conversion-like expressions.
             Make sure we feed only SSA_NAMEs or constants to fold though.  */
          tree tem = vn_get_expr_for (op0);
          if (UNARY_CLASS_P (tem)
              || BINARY_CLASS_P (tem)
              || TREE_CODE (tem) == VIEW_CONVERT_EXPR
              || TREE_CODE (tem) == SSA_NAME
              || TREE_CODE (tem) == CONSTRUCTOR
              || is_gimple_min_invariant (tem))

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)

  if (code == BIT_FIELD_REF)
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
                             op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
/* Try to simplify RHS using equivalences and constant folding.  */

try_to_simplify (gassign *stmt)
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
      /* Fallthrough for some unary codes that can operate on registers.  */
      if (!(code == REALPART_EXPR
            || code == IMAGPART_EXPR
            || code == VIEW_CONVERT_EXPR
            || code == BIT_FIELD_REF))
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
      return simplify_unary_expression (stmt);

    case tcc_comparison:
      return simplify_binary_expression (stmt);
/* Visit and value number USE, return true if the value number

visit_use (tree use)
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);

      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (gimple_has_volatile_ops (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
          enum tree_code code = gimple_assign_rhs_code (stmt);
          tree lhs = gimple_assign_lhs (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (code == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
              changed = visit_copy (lhs, rhs1);

          simplified = try_to_simplify (as_a <gassign *> (stmt));
              if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                    fprintf (dump_file, "\n");
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
              changed = visit_copy (lhs, simplified);
          else if (simplified)
              if (TREE_CODE (lhs) == SSA_NAME)
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (rhs1))
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (code == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs)
            changed = visit_reference_op_store (lhs, rhs1, stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (rhs1))
                       && is_gimple_min_invariant (simplified)))
                  VN_INFO (lhs)->has_constants = true;
                    changed = set_ssa_val_to (lhs, simplified);
                    changed = set_ssa_val_to (lhs, rhs1);
                  /* First try to lookup the simplified expression.  */
                      enum gimple_rhs_class rhs_class;

                      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
                      if ((rhs_class == GIMPLE_UNARY_RHS
                           || rhs_class == GIMPLE_BINARY_RHS
                           || rhs_class == GIMPLE_TERNARY_RHS)
                          && valid_gimple_rhs_p (simplified))
                          tree result = vn_nary_op_lookup (simplified, NULL);
                              changed = set_ssa_val_to (lhs, result);

                  /* Otherwise visit the original statement.  */
                  switch (vn_get_stmt_kind (stmt))
                      changed = visit_nary_op (lhs, stmt);
                      changed = visit_reference_op_load (lhs, rhs1, stmt);
                      changed = defs_to_varying (stmt);
            changed = defs_to_varying (stmt);
      else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
          tree lhs = gimple_call_lhs (stmt);
          if (lhs && TREE_CODE (lhs) == SSA_NAME)
              /* Try constant folding based on our current lattice.  */
              tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
                  if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "call ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " simplified to ");
                      print_generic_expr (dump_file, simplified, 0);
                      if (TREE_CODE (lhs) == SSA_NAME)
                        fprintf (dump_file, " has constants %d\n",
                                 expr_has_constants (simplified));
                        fprintf (dump_file, "\n");
              /* Setting value numbers to constants will occasionally
                 screw up phi congruence because constants are not
                 uniquely associated with a single ssa name that can be
                  && is_gimple_min_invariant (simplified))
                  VN_INFO (lhs)->expr = simplified;
                  VN_INFO (lhs)->has_constants = true;
                  changed = set_ssa_val_to (lhs, simplified);
                  if (gimple_vdef (stmt))
                    changed |= set_ssa_val_to (gimple_vdef (stmt),
                                               SSA_VAL (gimple_vuse (stmt)));
                       && TREE_CODE (simplified) == SSA_NAME)
                  changed = visit_copy (lhs, simplified);
                  if (gimple_vdef (stmt))
                    changed |= set_ssa_val_to (gimple_vdef (stmt),
                                               SSA_VAL (gimple_vuse (stmt)));
                  if (stmt_has_constants (stmt))
                    VN_INFO (lhs)->has_constants = true;
                      /* We reset expr and constantness here because we may
                         have been value numbering optimistically, and
                         iterating.  They may become non-constant in this case,
                         even if they were optimistically constant.  */
                      VN_INFO (lhs)->has_constants = false;
                      VN_INFO (lhs)->expr = NULL_TREE;
                  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
                      changed = defs_to_varying (stmt);

          if (!gimple_call_internal_p (stmt)
              && (/* Calls to the same function with the same vuse
                     and the same operands do not necessarily return the same
                     value, unless they're pure or const.  */
                  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
                  /* If calls have a vdef, subsequent calls won't have
                     the same incoming vuse.  So, if 2 calls with vdef have the
                     same vuse, we know they're not subsequent.
                     We can value number 2 calls to the same function with the
                     same vuse and the same operands which are not subsequent
                     the same, because there is no code in the program that can
                     compare the 2 values...  */
                  || (gimple_vdef (stmt)
                      /* ... unless the call returns a pointer which does
                         not alias with anything else.  In which case the
                         information that the values are distinct are encoded
                      && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                      /* Only perform the following when being called from PRE
                         which embeds tail merging.  */
                      && default_vn_walk_kind == VN_WALK)))
            changed = visit_reference_op_call (lhs, call_stmt);
            changed = defs_to_varying (stmt);
        changed = defs_to_varying (stmt);
/* Compare two operands by reverse postorder index.  */

compare_ops (const void *pa, const void *pb)
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
  else if (gimple_nop_p (opstmtb))

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);
      return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);

      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

sort_scc (vec<tree> scc)
  scc.qsort (compare_ops);
/* Insert the no longer used nary ONARY to the hash INFO.  */

copy_nary (vn_nary_op_t onary, vn_tables_t info)
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);

/* Insert the no longer used phi OPHI to the hash INFO.  */

copy_phi (vn_phi_t ophi, vn_tables_t info)
  vn_phi_t phi = info->phis_pool->allocate ();
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);

/* Insert the no longer used reference OREF to the hash INFO.  */

copy_reference (vn_reference_t oref, vn_tables_t info)
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
    free_reference (*slot);
/* Process a strongly connected component in the SSA graph.  */

process_scc (vec<tree> scc)
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
      if (VN_INFO (use)->use_processed)
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
  current_info = optimistic_info;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
        changed |= visit_use (var);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
                               ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
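
/* Illustrative sketch (for exposition, not part of the original file) of the
   optimistic iteration performed above, in pseudo-C:

     current_info = optimistic_info;
     do
       {
         changed = false;
         clear the optimistic tables;
         for each NAME in the SCC, in RPO order
           changed |= visit_use (NAME);
       }
     while (changed);

   Once no value number changes anymore, the converged optimistic entries
   are copied into the valid table.  */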
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

extract_and_process_scc_for_name (tree name)
  /* Found an SCC, pop the components off the SCC stack and

      x = sccstack.pop ();
      VN_INFO (x)->on_sccstack = false;
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", scc.length (),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

  if (scc.length () > 1)
/* Depth first search on NAME to discover and process SCC's in the SSA
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;

  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    clear_and_done_ssa_iter (&iter);

      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))

          /* Check if we are done.  */
          if (namevec.is_empty ())

          /* Restore the last use walker and continue walking there.  */
          name = namevec.pop ();
          memcpy (&iter, &itervec.last (),
                  sizeof (ssa_op_iter));
          goto continue_walking;

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
          if (! (VN_INFO (use)->visited))
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              itervec.safe_push (iter);
              namevec.safe_push (name);

              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);

      usep = op_iter_next_use (&iter);
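
/* Illustrative note (for exposition, not part of the original sources): the
   walk above is an iterative form of Tarjan's SCC algorithm.  "low" tracks
   the smallest DFS number reachable from a name; when low == dfsnum for
   NAME, everything pushed on sccstack since NAME forms one SCC and is popped
   and processed by extract_and_process_scc_for_name.  The explicit
   namevec/itervec stacks replace recursion, so deep SSA use chains cannot
   overflow the host stack.  */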
/* Allocate a value number table.  */

allocate_vn_table (vn_tables_t table)
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis", 30);
  table->references_pool = new object_allocator<vn_reference_s>
                             ("VN references", 30);

/* Free a value number table.  */

free_vn_table (vn_tables_t table)
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
  for (i = 0; i < num_ssa_names; i++)
      tree name = ssa_name (i);
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
      tree name = ssa_name (i);
          && SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ()
          && vn_ssa_aux_table[SSA_NAME_VERSION (name)]
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
/* Set *ID according to RESULT.  */

set_value_id_for_result (tree result, unsigned int *id)
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
    *id = get_next_value_id ();

/* Set the value ids in the valid hash tables.  */

set_hashtable_value_ids (void)
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;

  /* Now set the value ids of the things we had put in the hash

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
    set_value_id_for_result (vr->result, &vr->value_id);
class sccvn_dom_walker : public dom_walker
    : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
                    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
                     enum tree_code code, tree lhs, tree rhs, bool value);

  vec<std::pair<basic_block, std::pair<vn_nary_op_t, vn_nary_op_t> > >
/* Record a temporary condition for the BB and its dominated blocks.  */

sccvn_dom_walker::record_cond (basic_block bb,
                               enum tree_code code, tree lhs, tree rhs,
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
                                : boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
               value ? "true" : "false",
               old ? " (old entry saved)" : "");
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

sccvn_dom_walker::record_conds (basic_block bb,
                                enum tree_code code, tree lhs, tree rhs,
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);

      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
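
/* Illustrative example (not part of the original source): if the single
   predecessor edge was taken because "a_1 < b_2" is true, record_conds
   additionally records a_1 <= b_2 == true, a_1 != b_2 == true and
   a_1 == b_2 == false, so later lookups of those comparisons in the
   dominated region fold to constants.  */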
/* Restore expressions and values derived from conditionals.  */

sccvn_dom_walker::after_dom_children (basic_block bb)
  while (!cond_stack.is_empty ()
         && cond_stack.last ().first == bb)
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
        vn_nary_op_insert_into (old, current_info->nary, false);
/* Value number all statements in BB.  */

void
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  if (single_pred_p (bb))
    {
      edge e = single_pred_edge (bb);
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, e->src->succs)
	if (e2 != e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple stmt = last_stmt (e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
	  && !DFS (res))
	{
	  fail = true;
	  return;
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited
	    && !DFS (op))
	  {
	    fail = true;
	    return;
	  }
    }

  /* Finally look at the last stmt.  */
  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmts SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    gcc_assert (VN_INFO (op)->visited
		|| SSA_NAME_IS_DEFAULT_DEF (op));

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = gimple_cond_lhs (stmt);
	    ops[1] = gimple_cond_rhs (stmt);
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
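/* Illustrative GIMPLE scenario (names hypothetical):

     <bb 2>:
     if (n_1 > 10)
       goto <bb 3>;
     else
       goto <bb 4>;

   If n_1 has been value-numbered to the constant 4, fold_binary on the
   valueized operands yields boolean_false_node, find_taken_edge returns
   the edge to bb 4, and the edge to bb 3 loses EDGE_EXECUTABLE.  Unless
   bb 3 has another executable predecessor it is treated as unreachable
   when the walk reaches it.  */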
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	{
	  VN_INFO (def)->visited = true;
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (!info->visited)
	info->valnum = name;
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
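/* For instance (illustrative): two distinct INTEGER_CST nodes that both
   represent the value 1 in the same type compare equal here through
   operand_equal_p, whereas an SSA name never compares equal to a constant
   even if its value number is that constant; callers that want the latter
   must valueize first.  */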
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))