1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
48 /* This algorithm is based on the SCC algorithm presented by Keith
49 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
50 (http://citeseer.ist.psu.edu/41805.html). In
51 straight line code, it is equivalent to a regular hash based value
52 numbering that is performed in reverse postorder.
54 For code with cycles, there are two alternatives, both of which
55 require keeping the hashtables separate from the actual list of
56 value numbers for SSA names.
58 1. Iterate value numbering in an RPO walk of the blocks, removing
59 all the entries from the hashtable after each iteration (but
60 keeping the SSA name->value number mapping between iterations).
61 Iterate until it does not change.
63 2. Perform value numbering as part of an SCC walk on the SSA graph,
64 iterating only the cycles in the SSA graph until they do not change
65 (using a separate, optimistic hashtable for value numbering the SCC
until it does not change).

68 The second is not just faster in practice (because most SSA graph
69 cycles do not involve all the variables in the graph), it also has
some nice properties.
72 One of these nice properties is that when we pop an SCC off the
73 stack, we are guaranteed to have processed all the operands coming from
74 *outside of that SCC*, so we do not need to do anything special to
75 ensure they have value numbers.
77 Another nice property is that the SCC walk is done as part of a DFS
78 of the SSA graph, which makes it easy to perform combining and
79 simplifying operations at the same time.
81 The code below is deliberately written in a way that makes it easy
82 to separate the SCC walk from the other work it does.
84 In order to propagate constants through the code, we track which
85 expressions contain constants, and use those while folding. In
86 theory, we could also track expressions whose value numbers are
87 replaced, in case we end up folding based on expression
identities.
90 In order to value number memory, we assign value numbers to vuses.
91 This enables us to note that, for example, stores to the same
92 address of the same value from the same starting memory states are
equivalent.

TODO:
96 1. We can iterate only the changing portions of the SCC's, but
97 I have not seen an SCC big enough for this to be a win.
98 2. If you differentiate between phi nodes for loops and phi nodes
99 for if-then-else, you can properly consider phi nodes in different
100 blocks for equivalence.
101 3. We could value number vuses in more cases, particularly, whole
structure copies.  */
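/* For illustration only: a minimal, disabled sketch of the straight-line
   case the algorithm above degenerates to, i.e. hash-based value numbering
   performed in reverse postorder.  The helpers used here (fold_operands,
   hash_lookup, hash_insert, set_value_number) are hypothetical stand-ins;
   the real work is done by the visit_* and vn_*_lookup/insert routines
   below.  */
#if 0
static void
rpo_value_number_sketch (basic_block *order, int n)
{
  int i;
  for (i = 0; i < n; ++i)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (order[i]); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  /* Canonicalize the operands to their value numbers, look the
	     resulting expression up, and reuse an existing value number
	     on a hit or record a fresh one on a miss.  */
	  tree expr = fold_operands (stmt);
	  tree val = hash_lookup (expr);
	  if (!val)
	    val = hash_insert (expr, gimple_get_lhs (stmt));
	  set_value_number (gimple_get_lhs (stmt), val);
	}
    }
}
#endif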
105 /* The set of hashtables and alloc_pool's for their items. */
typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;
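/* Shorthand for the current value number of SSA name X.  */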
142 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
	 && types_compatible_p (vro1->type, vro2->type)
	 && expressions_equal_p (vro1->op0, vro2->op0)
	 && expressions_equal_p (vro1->op1, vro2->op1)
	 && expressions_equal_p (vro1->op2, vro2->op2);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
473 /* Return true if reference operations P1 and P2 are equivalent. This
474 means they have the same set of operands and vuses. */
477 vn_reference_eq (const void *p1
, const void *p2
)
481 const_vn_reference_t
const vr1
= (const_vn_reference_t
) p1
;
482 const_vn_reference_t
const vr2
= (const_vn_reference_t
) p2
;
483 if (vr1
->hashcode
!= vr2
->hashcode
)
486 /* Early out if this is not a hash collision. */
487 if (vr1
->hashcode
!= vr2
->hashcode
)
490 /* The VOP needs to be the same. */
491 if (vr1
->vuse
!= vr2
->vuse
)
494 /* If the operands are the same we are done. */
495 if (vr1
->operands
== vr2
->operands
)
498 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
501 if (INTEGRAL_TYPE_P (vr1
->type
)
502 && INTEGRAL_TYPE_P (vr2
->type
))
504 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
507 else if (INTEGRAL_TYPE_P (vr1
->type
)
508 && (TYPE_PRECISION (vr1
->type
)
509 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
511 else if (INTEGRAL_TYPE_P (vr2
->type
)
512 && (TYPE_PRECISION (vr2
->type
)
513 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
520 HOST_WIDE_INT off1
= 0, off2
= 0;
521 vn_reference_op_t vro1
, vro2
;
522 vn_reference_op_s tem1
, tem2
;
523 bool deref1
= false, deref2
= false;
524 for (; VEC_iterate (vn_reference_op_s
, vr1
->operands
, i
, vro1
); i
++)
526 if (vro1
->opcode
== MEM_REF
)
532 for (; VEC_iterate (vn_reference_op_s
, vr2
->operands
, j
, vro2
); j
++)
534 if (vro2
->opcode
== MEM_REF
)
542 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
544 memset (&tem1
, 0, sizeof (tem1
));
545 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
546 tem1
.type
= TREE_TYPE (tem1
.op0
);
547 tem1
.opcode
= TREE_CODE (tem1
.op0
);
550 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
552 memset (&tem2
, 0, sizeof (tem2
));
553 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
554 tem2
.type
= TREE_TYPE (tem2
.op0
);
555 tem2
.opcode
= TREE_CODE (tem2
.op0
);
558 if (!vn_reference_op_eq (vro1
, vro2
))
563 while (VEC_length (vn_reference_op_s
, vr1
->operands
) != i
564 || VEC_length (vn_reference_op_s
, vr2
->operands
) != j
);
569 /* Copy the operations present in load/store REF into RESULT, a vector of
570 vn_reference_op_s's. */
573 copy_reference_ops_from_ref (tree ref
, VEC(vn_reference_op_s
, heap
) **result
)
575 if (TREE_CODE (ref
) == TARGET_MEM_REF
)
577 vn_reference_op_s temp
;
579 memset (&temp
, 0, sizeof (temp
));
580 /* We do not care for spurious type qualifications. */
581 temp
.type
= TYPE_MAIN_VARIANT (TREE_TYPE (ref
));
582 temp
.opcode
= TREE_CODE (ref
);
583 temp
.op0
= TMR_INDEX (ref
);
584 temp
.op1
= TMR_STEP (ref
);
585 temp
.op2
= TMR_OFFSET (ref
);
587 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
589 memset (&temp
, 0, sizeof (temp
));
590 temp
.type
= NULL_TREE
;
591 temp
.opcode
= ERROR_MARK
;
592 temp
.op0
= TMR_INDEX2 (ref
);
594 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
596 memset (&temp
, 0, sizeof (temp
));
597 temp
.type
= NULL_TREE
;
598 temp
.opcode
= TREE_CODE (TMR_BASE (ref
));
599 temp
.op0
= TMR_BASE (ref
);
601 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
605 /* For non-calls, store the information that makes up the address. */
609 vn_reference_op_s temp
;
611 memset (&temp
, 0, sizeof (temp
));
612 /* We do not care for spurious type qualifications. */
613 temp
.type
= TYPE_MAIN_VARIANT (TREE_TYPE (ref
));
614 temp
.opcode
= TREE_CODE (ref
);
620 /* The base address gets its own vn_reference_op_s structure. */
621 temp
.op0
= TREE_OPERAND (ref
, 1);
622 if (host_integerp (TREE_OPERAND (ref
, 1), 0))
623 temp
.off
= TREE_INT_CST_LOW (TREE_OPERAND (ref
, 1));
626 /* Record bits and position. */
627 temp
.op0
= TREE_OPERAND (ref
, 1);
628 temp
.op1
= TREE_OPERAND (ref
, 2);
631 /* The field decl is enough to unambiguously specify the field,
632 a matching type is not necessary and a mismatching type
633 is always a spurious difference. */
634 temp
.type
= NULL_TREE
;
635 temp
.op0
= TREE_OPERAND (ref
, 1);
636 temp
.op1
= TREE_OPERAND (ref
, 2);
638 tree this_offset
= component_ref_field_offset (ref
);
640 && TREE_CODE (this_offset
) == INTEGER_CST
)
642 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
643 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
646 = double_int_add (tree_to_double_int (this_offset
),
648 (tree_to_double_int (bit_offset
),
649 uhwi_to_double_int (BITS_PER_UNIT
),
651 if (double_int_fits_in_shwi_p (off
))
657 case ARRAY_RANGE_REF
:
659 /* Record index as operand. */
660 temp
.op0
= TREE_OPERAND (ref
, 1);
661 /* Always record lower bounds and element size. */
662 temp
.op1
= array_ref_low_bound (ref
);
663 temp
.op2
= array_ref_element_size (ref
);
664 if (TREE_CODE (temp
.op0
) == INTEGER_CST
665 && TREE_CODE (temp
.op1
) == INTEGER_CST
666 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
668 double_int off
= tree_to_double_int (temp
.op0
);
669 off
= double_int_add (off
,
671 (tree_to_double_int (temp
.op1
)));
672 off
= double_int_mul (off
, tree_to_double_int (temp
.op2
));
673 if (double_int_fits_in_shwi_p (off
))
691 if (is_gimple_min_invariant (ref
))
697 /* These are only interesting for their operands, their
698 existence, and their type. They will never be the last
699 ref in the chain of references (IE they require an
700 operand), so we don't have to put anything
701 for op* as it will be handled by the iteration */
703 case VIEW_CONVERT_EXPR
:
707 /* This is only interesting for its constant offset. */
708 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
713 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
715 if (REFERENCE_CLASS_P (ref
)
716 || (TREE_CODE (ref
) == ADDR_EXPR
717 && !is_gimple_min_invariant (ref
)))
718 ref
= TREE_OPERAND (ref
, 0);
724 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
725 operands in *OPS, the reference alias set SET and the reference type TYPE.
726 Return true if something useful was produced. */
729 ao_ref_init_from_vn_reference (ao_ref
*ref
,
730 alias_set_type set
, tree type
,
731 VEC (vn_reference_op_s
, heap
) *ops
)
733 vn_reference_op_t op
;
735 tree base
= NULL_TREE
;
737 HOST_WIDE_INT offset
= 0;
738 HOST_WIDE_INT max_size
;
739 HOST_WIDE_INT size
= -1;
740 tree size_tree
= NULL_TREE
;
741 alias_set_type base_alias_set
= -1;
743 /* First get the final access size from just the outermost expression. */
744 op
= VEC_index (vn_reference_op_s
, ops
, 0);
745 if (op
->opcode
== COMPONENT_REF
)
746 size_tree
= DECL_SIZE (op
->op0
);
747 else if (op
->opcode
== BIT_FIELD_REF
)
751 enum machine_mode mode
= TYPE_MODE (type
);
753 size_tree
= TYPE_SIZE (type
);
755 size
= GET_MODE_BITSIZE (mode
);
757 if (size_tree
!= NULL_TREE
)
759 if (!host_integerp (size_tree
, 1))
762 size
= TREE_INT_CST_LOW (size_tree
);
765 /* Initially, maxsize is the same as the accessed element size.
766 In the following it will only grow (or become -1). */
769 /* Compute cumulative bit-offset for nested component-refs and array-refs,
770 and find the ultimate containing object. */
771 FOR_EACH_VEC_ELT (vn_reference_op_s
, ops
, i
, op
)
775 /* These may be in the reference ops, but we cannot do anything
776 sensible with them here. */
778 /* Apart from ADDR_EXPR arguments to MEM_REF. */
779 if (base
!= NULL_TREE
780 && TREE_CODE (base
) == MEM_REF
782 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
784 vn_reference_op_t pop
= VEC_index (vn_reference_op_s
, ops
, i
-1);
785 base
= TREE_OPERAND (op
->op0
, 0);
792 offset
+= pop
->off
* BITS_PER_UNIT
;
800 /* Record the base objects. */
802 base_alias_set
= get_deref_alias_set (op
->op0
);
803 *op0_p
= build2 (MEM_REF
, op
->type
,
805 op0_p
= &TREE_OPERAND (*op0_p
, 0);
816 /* And now the usual component-reference style ops. */
818 offset
+= tree_low_cst (op
->op1
, 0);
823 tree field
= op
->op0
;
824 /* We do not have a complete COMPONENT_REF tree here so we
825 cannot use component_ref_field_offset. Do the interesting
829 || !host_integerp (DECL_FIELD_OFFSET (field
), 1))
833 offset
+= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
835 offset
+= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
840 case ARRAY_RANGE_REF
:
842 /* We recorded the lower bound and the element size. */
843 if (!host_integerp (op
->op0
, 0)
844 || !host_integerp (op
->op1
, 0)
845 || !host_integerp (op
->op2
, 0))
849 HOST_WIDE_INT hindex
= TREE_INT_CST_LOW (op
->op0
);
850 hindex
-= TREE_INT_CST_LOW (op
->op1
);
851 hindex
*= TREE_INT_CST_LOW (op
->op2
);
852 hindex
*= BITS_PER_UNIT
;
864 case VIEW_CONVERT_EXPR
:
881 if (base
== NULL_TREE
)
884 ref
->ref
= NULL_TREE
;
886 ref
->offset
= offset
;
888 ref
->max_size
= max_size
;
889 ref
->ref_alias_set
= set
;
890 if (base_alias_set
!= -1)
891 ref
->base_alias_set
= base_alias_set
;
893 ref
->base_alias_set
= get_alias_set (base
);
898 /* Copy the operations present in load/store/call REF into RESULT, a vector of
899 vn_reference_op_s's. */
902 copy_reference_ops_from_call (gimple call
,
903 VEC(vn_reference_op_s
, heap
) **result
)
905 vn_reference_op_s temp
;
908 /* Copy the type, opcode, function being called and static chain. */
909 memset (&temp
, 0, sizeof (temp
));
910 temp
.type
= gimple_call_return_type (call
);
911 temp
.opcode
= CALL_EXPR
;
912 temp
.op0
= gimple_call_fn (call
);
913 temp
.op1
= gimple_call_chain (call
);
915 VEC_safe_push (vn_reference_op_s
, heap
, *result
, &temp
);
917 /* Copy the call arguments. As they can be references as well,
918 just chain them together. */
919 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
921 tree callarg
= gimple_call_arg (call
, i
);
922 copy_reference_ops_from_ref (callarg
, result
);
926 /* Create a vector of vn_reference_op_s structures from REF, a
927 REFERENCE_CLASS_P tree. The vector is not shared. */
929 static VEC(vn_reference_op_s
, heap
) *
930 create_reference_ops_from_ref (tree ref
)
932 VEC (vn_reference_op_s
, heap
) *result
= NULL
;
934 copy_reference_ops_from_ref (ref
, &result
);
938 /* Create a vector of vn_reference_op_s structures from CALL, a
939 call statement. The vector is not shared. */
941 static VEC(vn_reference_op_s
, heap
) *
942 create_reference_ops_from_call (gimple call
)
944 VEC (vn_reference_op_s
, heap
) *result
= NULL
;
946 copy_reference_ops_from_call (call
, &result
);
950 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
951 *I_P to point to the last element of the replacement. */
953 vn_reference_fold_indirect (VEC (vn_reference_op_s
, heap
) **ops
,
956 unsigned int i
= *i_p
;
957 vn_reference_op_t op
= VEC_index (vn_reference_op_s
, *ops
, i
);
958 vn_reference_op_t mem_op
= VEC_index (vn_reference_op_s
, *ops
, i
- 1);
960 HOST_WIDE_INT addr_offset
;
962 /* The only thing we have to do is from &OBJ.foo.bar add the offset
963 from .foo.bar to the preceding MEM_REF offset and replace the
964 address with &OBJ. */
965 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
->op0
, 0),
967 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
968 if (addr_base
!= op
->op0
)
970 double_int off
= tree_to_double_int (mem_op
->op0
);
971 off
= double_int_sext (off
, TYPE_PRECISION (TREE_TYPE (mem_op
->op0
)));
972 off
= double_int_add (off
, shwi_to_double_int (addr_offset
));
973 mem_op
->op0
= double_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
974 op
->op0
= build_fold_addr_expr (addr_base
);
975 if (host_integerp (mem_op
->op0
, 0))
976 mem_op
->off
= TREE_INT_CST_LOW (mem_op
->op0
);
982 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
983 *I_P to point to the last element of the replacement. */
985 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s
, heap
) **ops
,
988 unsigned int i
= *i_p
;
989 vn_reference_op_t op
= VEC_index (vn_reference_op_s
, *ops
, i
);
990 vn_reference_op_t mem_op
= VEC_index (vn_reference_op_s
, *ops
, i
- 1);
995 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
996 if (!is_gimple_assign (def_stmt
))
999 code
= gimple_assign_rhs_code (def_stmt
);
1000 if (code
!= ADDR_EXPR
1001 && code
!= POINTER_PLUS_EXPR
)
1004 off
= tree_to_double_int (mem_op
->op0
);
1005 off
= double_int_sext (off
, TYPE_PRECISION (TREE_TYPE (mem_op
->op0
)));
1007 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1008 from .foo.bar to the preceding MEM_REF offset and replace the
1009 address with &OBJ. */
1010 if (code
== ADDR_EXPR
)
1012 tree addr
, addr_base
;
1013 HOST_WIDE_INT addr_offset
;
1015 addr
= gimple_assign_rhs1 (def_stmt
);
1016 addr_base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
1019 || TREE_CODE (addr_base
) != MEM_REF
)
1022 off
= double_int_add (off
, shwi_to_double_int (addr_offset
));
1023 off
= double_int_add (off
, mem_ref_offset (addr_base
));
1024 op
->op0
= TREE_OPERAND (addr_base
, 0);
1029 ptr
= gimple_assign_rhs1 (def_stmt
);
1030 ptroff
= gimple_assign_rhs2 (def_stmt
);
1031 if (TREE_CODE (ptr
) != SSA_NAME
1032 || TREE_CODE (ptroff
) != INTEGER_CST
)
1035 off
= double_int_add (off
, tree_to_double_int (ptroff
));
1039 mem_op
->op0
= double_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1040 if (host_integerp (mem_op
->op0
, 0))
1041 mem_op
->off
= TREE_INT_CST_LOW (mem_op
->op0
);
1044 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1045 op
->op0
= SSA_VAL (op
->op0
);
1046 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1047 op
->opcode
= TREE_CODE (op
->op0
);
1050 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1051 vn_reference_maybe_forwprop_address (ops
, i_p
);
1052 else if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1053 vn_reference_fold_indirect (ops
, i_p
);
1056 /* Optimize the reference REF to a constant if possible or return
1057 NULL_TREE if not. */
1060 fully_constant_vn_reference_p (vn_reference_t ref
)
1062 VEC (vn_reference_op_s
, heap
) *operands
= ref
->operands
;
1063 vn_reference_op_t op
;
1065 /* Try to simplify the translated expression if it is
1066 a call to a builtin function with at most two arguments. */
1067 op
= VEC_index (vn_reference_op_s
, operands
, 0);
1068 if (op
->opcode
== CALL_EXPR
1069 && TREE_CODE (op
->op0
) == ADDR_EXPR
1070 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1071 && DECL_BUILT_IN (TREE_OPERAND (op
->op0
, 0))
1072 && VEC_length (vn_reference_op_s
, operands
) >= 2
1073 && VEC_length (vn_reference_op_s
, operands
) <= 3)
1075 vn_reference_op_t arg0
, arg1
= NULL
;
1076 bool anyconst
= false;
1077 arg0
= VEC_index (vn_reference_op_s
, operands
, 1);
1078 if (VEC_length (vn_reference_op_s
, operands
) > 2)
1079 arg1
= VEC_index (vn_reference_op_s
, operands
, 2);
1080 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1081 || (arg0
->opcode
== ADDR_EXPR
1082 && is_gimple_min_invariant (arg0
->op0
)))
1085 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1086 || (arg1
->opcode
== ADDR_EXPR
1087 && is_gimple_min_invariant (arg1
->op0
))))
1091 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1094 arg1
? arg1
->op0
: NULL
);
1096 && TREE_CODE (folded
) == NOP_EXPR
)
1097 folded
= TREE_OPERAND (folded
, 0);
1099 && is_gimple_min_invariant (folded
))
1104 /* Simplify reads from constant strings. */
1105 else if (op
->opcode
== ARRAY_REF
1106 && TREE_CODE (op
->op0
) == INTEGER_CST
1107 && integer_zerop (op
->op1
)
1108 && VEC_length (vn_reference_op_s
, operands
) == 2)
1110 vn_reference_op_t arg0
;
1111 arg0
= VEC_index (vn_reference_op_s
, operands
, 1);
1112 if (arg0
->opcode
== STRING_CST
1113 && (TYPE_MODE (op
->type
)
1114 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
->op0
))))
1115 && GET_MODE_CLASS (TYPE_MODE (op
->type
)) == MODE_INT
1116 && GET_MODE_SIZE (TYPE_MODE (op
->type
)) == 1
1117 && compare_tree_int (op
->op0
, TREE_STRING_LENGTH (arg0
->op0
)) < 0)
1118 return build_int_cst_type (op
->type
,
1119 (TREE_STRING_POINTER (arg0
->op0
)
1120 [TREE_INT_CST_LOW (op
->op0
)]));
1126 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1127 structures into their value numbers. This is done in-place, and
1128 the vector passed in is returned. */
1130 static VEC (vn_reference_op_s
, heap
) *
1131 valueize_refs (VEC (vn_reference_op_s
, heap
) *orig
)
1133 vn_reference_op_t vro
;
1136 FOR_EACH_VEC_ELT (vn_reference_op_s
, orig
, i
, vro
)
1138 if (vro
->opcode
== SSA_NAME
1139 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1141 vro
->op0
= SSA_VAL (vro
->op0
);
1142 /* If it transforms from an SSA_NAME to a constant, update
1144 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1145 vro
->opcode
= TREE_CODE (vro
->op0
);
1147 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1148 vro
->op1
= SSA_VAL (vro
->op1
);
1149 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1150 vro
->op2
= SSA_VAL (vro
->op2
);
1151 /* If it transforms from an SSA_NAME to an address, fold with
1152 a preceding indirect reference. */
1155 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1156 && VEC_index (vn_reference_op_s
,
1157 orig
, i
- 1)->opcode
== MEM_REF
)
1158 vn_reference_fold_indirect (&orig
, &i
);
1160 && vro
->opcode
== SSA_NAME
1161 && VEC_index (vn_reference_op_s
,
1162 orig
, i
- 1)->opcode
== MEM_REF
)
1163 vn_reference_maybe_forwprop_address (&orig
, &i
);
1164 /* If it transforms a non-constant ARRAY_REF into a constant
1165 one, adjust the constant offset. */
1166 else if (vro
->opcode
== ARRAY_REF
1168 && TREE_CODE (vro
->op0
) == INTEGER_CST
1169 && TREE_CODE (vro
->op1
) == INTEGER_CST
1170 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1172 double_int off
= tree_to_double_int (vro
->op0
);
1173 off
= double_int_add (off
,
1175 (tree_to_double_int (vro
->op1
)));
1176 off
= double_int_mul (off
, tree_to_double_int (vro
->op2
));
1177 if (double_int_fits_in_shwi_p (off
))
1185 static VEC(vn_reference_op_s
, heap
) *shared_lookup_references
;
1187 /* Create a vector of vn_reference_op_s structures from REF, a
1188 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1191 static VEC(vn_reference_op_s
, heap
) *
1192 valueize_shared_reference_ops_from_ref (tree ref
)
1196 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1197 copy_reference_ops_from_ref (ref
, &shared_lookup_references
);
1198 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1199 return shared_lookup_references
;
1202 /* Create a vector of vn_reference_op_s structures from CALL, a
1203 call statement. The vector is shared among all callers of
1206 static VEC(vn_reference_op_s
, heap
) *
1207 valueize_shared_reference_ops_from_call (gimple call
)
1211 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1212 copy_reference_ops_from_call (call
, &shared_lookup_references
);
1213 shared_lookup_references
= valueize_refs (shared_lookup_references
);
1214 return shared_lookup_references
;
1217 /* Lookup an SCCVN reference operation VR in the current hash table.
1218 Returns the resulting value number if it exists in the hash table,
1219 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1220 vn_reference_t stored in the hashtable if something is found. */
1223 vn_reference_lookup_1 (vn_reference_t vr
, vn_reference_t
*vnresult
)
1228 hash
= vr
->hashcode
;
1229 slot
= htab_find_slot_with_hash (current_info
->references
, vr
,
1231 if (!slot
&& current_info
== optimistic_info
)
1232 slot
= htab_find_slot_with_hash (valid_info
->references
, vr
,
1237 *vnresult
= (vn_reference_t
)*slot
;
1238 return ((vn_reference_t
)*slot
)->result
;
1244 static tree
*last_vuse_ptr
;
1246 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1247 with the current VUSE and performs the expression lookup. */
1250 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
, void *vr_
)
1252 vn_reference_t vr
= (vn_reference_t
)vr_
;
1257 *last_vuse_ptr
= vuse
;
1259 /* Fixup vuse and hash. */
1261 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
1262 vr
->vuse
= SSA_VAL (vuse
);
1264 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
1266 hash
= vr
->hashcode
;
1267 slot
= htab_find_slot_with_hash (current_info
->references
, vr
,
1269 if (!slot
&& current_info
== optimistic_info
)
1270 slot
= htab_find_slot_with_hash (valid_info
->references
, vr
,
1278 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1279 from the statement defining VUSE and if not successful tries to
1280 translate *REFP and VR_ through an aggregate copy at the definition
of VUSE.  */
1284 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
)
1286 vn_reference_t vr
= (vn_reference_t
)vr_
;
1287 gimple def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1290 HOST_WIDE_INT offset
, maxsize
;
1291 static VEC (vn_reference_op_s
, heap
) *lhs_ops
= NULL
;
1293 bool lhs_ref_ok
= false;
1295 /* First try to disambiguate after value-replacing in the definitions LHS. */
1296 if (is_gimple_assign (def_stmt
))
1298 VEC (vn_reference_op_s
, heap
) *tem
;
1299 tree lhs
= gimple_assign_lhs (def_stmt
);
1300 /* Avoid re-allocation overhead. */
1301 VEC_truncate (vn_reference_op_s
, lhs_ops
, 0);
1302 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1304 lhs_ops
= valueize_refs (lhs_ops
);
1305 gcc_assert (lhs_ops
== tem
);
1306 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
, get_alias_set (lhs
),
1307 TREE_TYPE (lhs
), lhs_ops
);
1309 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1313 base
= ao_ref_base (ref
);
1314 offset
= ref
->offset
;
1315 maxsize
= ref
->max_size
;
1317 /* If we cannot constrain the size of the reference we cannot
1318 test if anything kills it. */
1322 /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1323 from that definition.
1) Memset.  */
1325 if (is_gimple_reg_type (vr
->type
)
1326 && is_gimple_call (def_stmt
)
1327 && (fndecl
= gimple_call_fndecl (def_stmt
))
1328 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1329 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMSET
1330 && integer_zerop (gimple_call_arg (def_stmt
, 1))
1331 && host_integerp (gimple_call_arg (def_stmt
, 2), 1)
1332 && TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
)
1334 tree ref2
= TREE_OPERAND (gimple_call_arg (def_stmt
, 0), 0);
1336 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1337 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
);
1338 size2
= TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 2)) * 8;
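	  /* The memset length argument is in bytes; scale it to bits and
	     verify the multiplication by eight did not overflow before
	     comparing it against the queried access.  */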
1339 if ((unsigned HOST_WIDE_INT
)size2
/ 8
1340 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 2))
1341 && operand_equal_p (base
, base2
, 0)
1342 && offset2
<= offset
1343 && offset2
+ size2
>= offset
+ maxsize
)
1345 tree val
= build_zero_cst (vr
->type
);
1346 unsigned int value_id
= get_or_alloc_constant_value_id (val
);
1347 return vn_reference_insert_pieces (vuse
, vr
->set
, vr
->type
,
1348 VEC_copy (vn_reference_op_s
,
1349 heap
, vr
->operands
),
1354 /* 2) Assignment from an empty CONSTRUCTOR. */
1355 else if (is_gimple_reg_type (vr
->type
)
1356 && gimple_assign_single_p (def_stmt
)
1357 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
1358 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
1361 HOST_WIDE_INT offset2
, size2
, maxsize2
;
1362 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
1363 &offset2
, &size2
, &maxsize2
);
1364 if (operand_equal_p (base
, base2
, 0)
1365 && offset2
<= offset
1366 && offset2
+ size2
>= offset
+ maxsize
)
1368 tree val
= build_zero_cst (vr
->type
);
1369 unsigned int value_id
= get_or_alloc_constant_value_id (val
);
1370 return vn_reference_insert_pieces (vuse
, vr
->set
, vr
->type
,
1371 VEC_copy (vn_reference_op_s
,
1372 heap
, vr
->operands
),
1377 /* For aggregate copies translate the reference through them if
1378 the copy kills ref. */
1379 else if (gimple_assign_single_p (def_stmt
)
1380 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
1381 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
1382 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
1385 HOST_WIDE_INT offset2
, size2
;
1387 VEC (vn_reference_op_s
, heap
) *rhs
= NULL
;
1388 vn_reference_op_t vro
;
1394 /* See if the assignment kills REF. */
1395 base2
= ao_ref_base (&lhs_ref
);
1396 offset2
= lhs_ref
.offset
;
1397 size2
= lhs_ref
.size
;
1399 && !operand_equal_p (base
, base2
, 0))
1401 || offset2
+ size2
< offset
+ maxsize
)
1404 /* Find the common base of ref and the lhs. lhs_ops already
1405 contains valueized operands for the lhs. */
1406 i
= VEC_length (vn_reference_op_s
, vr
->operands
) - 1;
1407 j
= VEC_length (vn_reference_op_s
, lhs_ops
) - 1;
1408 while (j
>= 0 && i
>= 0
1409 && vn_reference_op_eq (VEC_index (vn_reference_op_s
,
1411 VEC_index (vn_reference_op_s
, lhs_ops
, j
)))
1417 /* i now points to the first additional op.
1418 ??? LHS may not be completely contained in VR, one or more
1419 VIEW_CONVERT_EXPRs could be in its way. We could at least
1420 try handling outermost VIEW_CONVERT_EXPRs. */
1424 /* Now re-write REF to be based on the rhs of the assignment. */
1425 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt
), &rhs
);
1426 /* We need to pre-pend vr->operands[0..i] to rhs. */
1427 if (i
+ 1 + VEC_length (vn_reference_op_s
, rhs
)
1428 > VEC_length (vn_reference_op_s
, vr
->operands
))
1430 VEC (vn_reference_op_s
, heap
) *old
= vr
->operands
;
1431 VEC_safe_grow (vn_reference_op_s
, heap
, vr
->operands
,
1432 i
+ 1 + VEC_length (vn_reference_op_s
, rhs
));
1433 if (old
== shared_lookup_references
1434 && vr
->operands
!= old
)
1435 shared_lookup_references
= NULL
;
1438 VEC_truncate (vn_reference_op_s
, vr
->operands
,
1439 i
+ 1 + VEC_length (vn_reference_op_s
, rhs
));
1440 FOR_EACH_VEC_ELT (vn_reference_op_s
, rhs
, j
, vro
)
1441 VEC_replace (vn_reference_op_s
, vr
->operands
, i
+ 1 + j
, vro
);
1442 VEC_free (vn_reference_op_s
, heap
, rhs
);
1443 vr
->hashcode
= vn_reference_compute_hash (vr
);
1445 /* Adjust *ref from the new operands. */
1446 if (!ao_ref_init_from_vn_reference (&r
, vr
->set
, vr
->type
, vr
->operands
))
1448 /* This can happen with bitfields. */
1449 if (ref
->size
!= r
.size
)
1453 /* Do not update last seen VUSE after translating. */
1454 last_vuse_ptr
= NULL
;
1456 /* Keep looking for the adjusted *REF / VR pair. */
1460 /* Bail out and stop walking. */
1464 /* Lookup a reference operation by its parts, in the current hash table.
1465 Returns the resulting value number if it exists in the hash table,
1466 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1467 vn_reference_t stored in the hashtable if something is found. */
1470 vn_reference_lookup_pieces (tree vuse
, alias_set_type set
, tree type
,
1471 VEC (vn_reference_op_s
, heap
) *operands
,
1472 vn_reference_t
*vnresult
, bool maywalk
)
1474 struct vn_reference_s vr1
;
1482 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1483 VEC_truncate (vn_reference_op_s
, shared_lookup_references
, 0);
1484 VEC_safe_grow (vn_reference_op_s
, heap
, shared_lookup_references
,
1485 VEC_length (vn_reference_op_s
, operands
));
1486 memcpy (VEC_address (vn_reference_op_s
, shared_lookup_references
),
1487 VEC_address (vn_reference_op_s
, operands
),
1488 sizeof (vn_reference_op_s
)
1489 * VEC_length (vn_reference_op_s
, operands
));
1490 vr1
.operands
= operands
= shared_lookup_references
1491 = valueize_refs (shared_lookup_references
);
1494 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1495 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
1498 vn_reference_lookup_1 (&vr1
, vnresult
);
1504 if (ao_ref_init_from_vn_reference (&r
, set
, type
, vr1
.operands
))
1506 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
1507 vn_reference_lookup_2
,
1508 vn_reference_lookup_3
, &vr1
);
1509 if (vr1
.operands
!= operands
)
1510 VEC_free (vn_reference_op_s
, heap
, vr1
.operands
);
1514 return (*vnresult
)->result
;
1519 /* Lookup OP in the current hash table, and return the resulting value
1520 number if it exists in the hash table. Return NULL_TREE if it does
1521 not exist in the hash table or if the result field of the structure
1522 was NULL.  VNRESULT will be filled in with the vn_reference_t
1523 stored in the hashtable if one exists. */
1526 vn_reference_lookup (tree op
, tree vuse
, bool maywalk
,
1527 vn_reference_t
*vnresult
)
1529 VEC (vn_reference_op_s
, heap
) *operands
;
1530 struct vn_reference_s vr1
;
1536 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1537 vr1
.operands
= operands
= valueize_shared_reference_ops_from_ref (op
);
1538 vr1
.type
= TREE_TYPE (op
);
1539 vr1
.set
= get_alias_set (op
);
1540 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
1541 if ((cst
= fully_constant_vn_reference_p (&vr1
)))
1547 vn_reference_t wvnresult
;
1549 ao_ref_init (&r
, op
);
1551 (vn_reference_t
)walk_non_aliased_vuses (&r
, vr1
.vuse
,
1552 vn_reference_lookup_2
,
1553 vn_reference_lookup_3
, &vr1
);
1554 if (vr1
.operands
!= operands
)
1555 VEC_free (vn_reference_op_s
, heap
, vr1
.operands
);
1559 *vnresult
= wvnresult
;
1560 return wvnresult
->result
;
1566 return vn_reference_lookup_1 (&vr1
, vnresult
);
1570 /* Insert OP into the current hash table with a value number of
1571 RESULT, and return the resulting reference structure we created. */
1574 vn_reference_insert (tree op
, tree result
, tree vuse
)
1579 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
1580 if (TREE_CODE (result
) == SSA_NAME
)
1581 vr1
->value_id
= VN_INFO (result
)->value_id
;
1583 vr1
->value_id
= get_or_alloc_constant_value_id (result
);
1584 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1585 vr1
->operands
= valueize_refs (create_reference_ops_from_ref (op
));
1586 vr1
->type
= TREE_TYPE (op
);
1587 vr1
->set
= get_alias_set (op
);
1588 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
1589 vr1
->result
= TREE_CODE (result
) == SSA_NAME
? SSA_VAL (result
) : result
;
1591 slot
= htab_find_slot_with_hash (current_info
->references
, vr1
, vr1
->hashcode
,
1594 /* Because we lookup stores using vuses, and value number failures
1595 using the vdefs (see visit_reference_op_store for how and why),
1596 it's possible that on failure we may try to insert an already
1597 inserted store. This is not wrong, there is no ssa name for a
1598 store that we could use as a differentiator anyway. Thus, unlike
1599 the other lookup functions, you cannot gcc_assert (!*slot)
1602 /* But free the old slot in case of a collision. */
1604 free_reference (*slot
);
1610 /* Insert a reference by its pieces into the current hash table with
1611 a value number of RESULT. Return the resulting reference
1612 structure we created. */
1615 vn_reference_insert_pieces (tree vuse
, alias_set_type set
, tree type
,
1616 VEC (vn_reference_op_s
, heap
) *operands
,
1617 tree result
, unsigned int value_id
)
1623 vr1
= (vn_reference_t
) pool_alloc (current_info
->references_pool
);
1624 vr1
->value_id
= value_id
;
1625 vr1
->vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
1626 vr1
->operands
= valueize_refs (operands
);
1629 vr1
->hashcode
= vn_reference_compute_hash (vr1
);
1630 if (result
&& TREE_CODE (result
) == SSA_NAME
)
1631 result
= SSA_VAL (result
);
1632 vr1
->result
= result
;
1634 slot
= htab_find_slot_with_hash (current_info
->references
, vr1
, vr1
->hashcode
,
1637 /* At this point we should have all the things inserted that we have
1638 seen before, and we should never try inserting something that
1640 gcc_assert (!*slot
);
1642 free_reference (*slot
);
1648 /* Compute and return the hash value for nary operation VNO1. */
1651 vn_nary_op_compute_hash (const vn_nary_op_t vno1
)
1656 for (i
= 0; i
< vno1
->length
; ++i
)
1657 if (TREE_CODE (vno1
->op
[i
]) == SSA_NAME
)
1658 vno1
->op
[i
] = SSA_VAL (vno1
->op
[i
]);
1660 if (vno1
->length
== 2
1661 && commutative_tree_code (vno1
->opcode
)
1662 && tree_swap_operands_p (vno1
->op
[0], vno1
->op
[1], false))
1664 tree temp
= vno1
->op
[0];
1665 vno1
->op
[0] = vno1
->op
[1];
1669 hash
= iterative_hash_hashval_t (vno1
->opcode
, 0);
1670 for (i
= 0; i
< vno1
->length
; ++i
)
1671 hash
= iterative_hash_expr (vno1
->op
[i
], hash
);
1676 /* Return the computed hashcode for nary operation P1. */
1679 vn_nary_op_hash (const void *p1
)
1681 const_vn_nary_op_t
const vno1
= (const_vn_nary_op_t
) p1
;
1682 return vno1
->hashcode
;
1685 /* Compare nary operations P1 and P2 and return true if they are
1689 vn_nary_op_eq (const void *p1
, const void *p2
)
1691 const_vn_nary_op_t
const vno1
= (const_vn_nary_op_t
) p1
;
1692 const_vn_nary_op_t
const vno2
= (const_vn_nary_op_t
) p2
;
1695 if (vno1
->hashcode
!= vno2
->hashcode
)
1698 if (vno1
->opcode
!= vno2
->opcode
1699 || !types_compatible_p (vno1
->type
, vno2
->type
))
1702 for (i
= 0; i
< vno1
->length
; ++i
)
1703 if (!expressions_equal_p (vno1
->op
[i
], vno2
->op
[i
]))
1709 /* Initialize VNO from the pieces provided. */
1712 init_vn_nary_op_from_pieces (vn_nary_op_t vno
, unsigned int length
,
1713 enum tree_code code
, tree type
, tree op0
,
1714 tree op1
, tree op2
, tree op3
)
1717 vno
->length
= length
;
1721 /* The fallthrus here are deliberate. */
1722 case 4: vno
->op
[3] = op3
;
1723 case 3: vno
->op
[2] = op2
;
1724 case 2: vno
->op
[1] = op1
;
1725 case 1: vno
->op
[0] = op0
;
1731 /* Initialize VNO from OP. */
1734 init_vn_nary_op_from_op (vn_nary_op_t vno
, tree op
)
1738 vno
->opcode
= TREE_CODE (op
);
1739 vno
->length
= TREE_CODE_LENGTH (TREE_CODE (op
));
1740 vno
->type
= TREE_TYPE (op
);
1741 for (i
= 0; i
< vno
->length
; ++i
)
1742 vno
->op
[i
] = TREE_OPERAND (op
, i
);
1745 /* Initialize VNO from STMT. */
1748 init_vn_nary_op_from_stmt (vn_nary_op_t vno
, gimple stmt
)
1752 vno
->opcode
= gimple_assign_rhs_code (stmt
);
1753 vno
->length
= gimple_num_ops (stmt
) - 1;
1754 vno
->type
= gimple_expr_type (stmt
);
1755 for (i
= 0; i
< vno
->length
; ++i
)
1756 vno
->op
[i
] = gimple_op (stmt
, i
+ 1);
1757 if (vno
->opcode
== REALPART_EXPR
1758 || vno
->opcode
== IMAGPART_EXPR
1759 || vno
->opcode
== VIEW_CONVERT_EXPR
)
1760 vno
->op
[0] = TREE_OPERAND (vno
->op
[0], 0);
1763 /* Compute the hashcode for VNO and look for it in the hash table;
1764 return the resulting value number if it exists in the hash table.
1765 Return NULL_TREE if it does not exist in the hash table or if the
1766 result field of the operation is NULL. VNRESULT will contain the
1767 vn_nary_op_t from the hashtable if it exists. */
1770 vn_nary_op_lookup_1 (vn_nary_op_t vno
, vn_nary_op_t
*vnresult
)
1777 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
1778 slot
= htab_find_slot_with_hash (current_info
->nary
, vno
, vno
->hashcode
,
1780 if (!slot
&& current_info
== optimistic_info
)
1781 slot
= htab_find_slot_with_hash (valid_info
->nary
, vno
, vno
->hashcode
,
1786 *vnresult
= (vn_nary_op_t
)*slot
;
1787 return ((vn_nary_op_t
)*slot
)->result
;
1790 /* Lookup an n-ary operation by its pieces and return the resulting value
1791 number if it exists in the hash table. Return NULL_TREE if it does
1792 not exist in the hash table or if the result field of the operation
1793 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1797 vn_nary_op_lookup_pieces (unsigned int length
, enum tree_code code
,
1798 tree type
, tree op0
, tree op1
, tree op2
,
1799 tree op3
, vn_nary_op_t
*vnresult
)
1801 struct vn_nary_op_s vno1
;
1802 init_vn_nary_op_from_pieces (&vno1
, length
, code
, type
, op0
, op1
, op2
, op3
);
1803 return vn_nary_op_lookup_1 (&vno1
, vnresult
);
1806 /* Lookup OP in the current hash table, and return the resulting value
1807 number if it exists in the hash table. Return NULL_TREE if it does
1808 not exist in the hash table or if the result field of the operation
1809 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1813 vn_nary_op_lookup (tree op
, vn_nary_op_t
*vnresult
)
1815 struct vn_nary_op_s vno1
;
1816 init_vn_nary_op_from_op (&vno1
, op
);
1817 return vn_nary_op_lookup_1 (&vno1
, vnresult
);
1820 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1821 value number if it exists in the hash table. Return NULL_TREE if
1822 it does not exist in the hash table. VNRESULT will contain the
1823 vn_nary_op_t from the hashtable if it exists. */
1826 vn_nary_op_lookup_stmt (gimple stmt
, vn_nary_op_t
*vnresult
)
1828 struct vn_nary_op_s vno1
;
1829 init_vn_nary_op_from_stmt (&vno1
, stmt
);
1830 return vn_nary_op_lookup_1 (&vno1
, vnresult
);
1833 /* Return the size of a vn_nary_op_t with LENGTH operands. */
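/* A vn_nary_op_s is declared with room for four operand trees, so the
   size of an operation with LENGTH operands simply drops the unused
   trailing slots; e.g. a unary operation needs
   sizeof (struct vn_nary_op_s) - 3 * sizeof (tree) bytes.  */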
static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}
1841 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
1849 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
nary obstack.  */
1853 alloc_vn_nary_op (unsigned int length
, tree result
, unsigned int value_id
)
1855 vn_nary_op_t vno1
= alloc_vn_nary_op_noinit (length
,
1856 ¤t_info
->nary_obstack
);
1858 vno1
->value_id
= value_id
;
1859 vno1
->length
= length
;
1860 vno1
->result
= result
;
1865 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
1866 VNO->HASHCODE first. */
1869 vn_nary_op_insert_into (vn_nary_op_t vno
, htab_t table
, bool compute_hash
)
1874 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
1876 slot
= htab_find_slot_with_hash (table
, vno
, vno
->hashcode
, INSERT
);
1877 gcc_assert (!*slot
);
1883 /* Insert an n-ary operation into the current hash table using its
1884 pieces. Return the vn_nary_op_t structure we created and put in
1888 vn_nary_op_insert_pieces (unsigned int length
, enum tree_code code
,
1889 tree type
, tree op0
,
1890 tree op1
, tree op2
, tree op3
,
1892 unsigned int value_id
)
1896 vno1
= alloc_vn_nary_op (length
, result
, value_id
);
1897 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, op0
, op1
, op2
, op3
);
1898 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
1901 /* Insert OP into the current hash table with a value number of
1902 RESULT. Return the vn_nary_op_t structure we created and put in
1906 vn_nary_op_insert (tree op
, tree result
)
1908 unsigned length
= TREE_CODE_LENGTH (TREE_CODE (op
));
1911 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
1912 init_vn_nary_op_from_op (vno1
, op
);
1913 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
1916 /* Insert the rhs of STMT into the current hash table with a value number of
1920 vn_nary_op_insert_stmt (gimple stmt
, tree result
)
1922 unsigned length
= gimple_num_ops (stmt
) - 1;
1925 vno1
= alloc_vn_nary_op (length
, result
, VN_INFO (result
)->value_id
);
1926 init_vn_nary_op_from_stmt (vno1
, stmt
);
1927 return vn_nary_op_insert_into (vno1
, current_info
->nary
, true);
1930 /* Compute a hashcode for PHI operation VP1 and return it. */
1932 static inline hashval_t
1933 vn_phi_compute_hash (vn_phi_t vp1
)
1940 result
= vp1
->block
->index
;
1942 /* If all PHI arguments are constants we need to distinguish
1943 the PHI node via its type. */
1944 type
= TREE_TYPE (VEC_index (tree
, vp1
->phiargs
, 0));
1945 result
+= (INTEGRAL_TYPE_P (type
)
1946 + (INTEGRAL_TYPE_P (type
)
1947 ? TYPE_PRECISION (type
) + TYPE_UNSIGNED (type
) : 0));
1949 FOR_EACH_VEC_ELT (tree
, vp1
->phiargs
, i
, phi1op
)
1951 if (phi1op
== VN_TOP
)
1953 result
= iterative_hash_expr (phi1op
, result
);
1959 /* Return the computed hashcode for phi operation P1. */
1962 vn_phi_hash (const void *p1
)
1964 const_vn_phi_t
const vp1
= (const_vn_phi_t
) p1
;
1965 return vp1
->hashcode
;
1968 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1971 vn_phi_eq (const void *p1
, const void *p2
)
1973 const_vn_phi_t
const vp1
= (const_vn_phi_t
) p1
;
1974 const_vn_phi_t
const vp2
= (const_vn_phi_t
) p2
;
1976 if (vp1
->hashcode
!= vp2
->hashcode
)
1979 if (vp1
->block
== vp2
->block
)
1984 /* If the PHI nodes do not have compatible types
1985 they are not the same. */
1986 if (!types_compatible_p (TREE_TYPE (VEC_index (tree
, vp1
->phiargs
, 0)),
1987 TREE_TYPE (VEC_index (tree
, vp2
->phiargs
, 0))))
1990 /* Any phi in the same block will have its arguments in the
1991 same edge order, because of how we store phi nodes. */
1992 FOR_EACH_VEC_ELT (tree
, vp1
->phiargs
, i
, phi1op
)
1994 tree phi2op
= VEC_index (tree
, vp2
->phiargs
, i
);
1995 if (phi1op
== VN_TOP
|| phi2op
== VN_TOP
)
1997 if (!expressions_equal_p (phi1op
, phi2op
))
2005 static VEC(tree
, heap
) *shared_lookup_phiargs
;
2007 /* Lookup PHI in the current hash table, and return the resulting
2008 value number if it exists in the hash table. Return NULL_TREE if
2009 it does not exist in the hash table. */
2012 vn_phi_lookup (gimple phi
)
2015 struct vn_phi_s vp1
;
2018 VEC_truncate (tree
, shared_lookup_phiargs
, 0);
2020 /* Canonicalize the SSA_NAME's to their value number. */
2021 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2023 tree def
= PHI_ARG_DEF (phi
, i
);
2024 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2025 VEC_safe_push (tree
, heap
, shared_lookup_phiargs
, def
);
2027 vp1
.phiargs
= shared_lookup_phiargs
;
2028 vp1
.block
= gimple_bb (phi
);
2029 vp1
.hashcode
= vn_phi_compute_hash (&vp1
);
2030 slot
= htab_find_slot_with_hash (current_info
->phis
, &vp1
, vp1
.hashcode
,
2032 if (!slot
&& current_info
== optimistic_info
)
2033 slot
= htab_find_slot_with_hash (valid_info
->phis
, &vp1
, vp1
.hashcode
,
2037 return ((vn_phi_t
)*slot
)->result
;
2040 /* Insert PHI into the current hash table with a value number of
2044 vn_phi_insert (gimple phi
, tree result
)
2047 vn_phi_t vp1
= (vn_phi_t
) pool_alloc (current_info
->phis_pool
);
2049 VEC (tree
, heap
) *args
= NULL
;
2051 /* Canonicalize the SSA_NAME's to their value number. */
2052 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
2054 tree def
= PHI_ARG_DEF (phi
, i
);
2055 def
= TREE_CODE (def
) == SSA_NAME
? SSA_VAL (def
) : def
;
2056 VEC_safe_push (tree
, heap
, args
, def
);
2058 vp1
->value_id
= VN_INFO (result
)->value_id
;
2059 vp1
->phiargs
= args
;
2060 vp1
->block
= gimple_bb (phi
);
2061 vp1
->result
= result
;
2062 vp1
->hashcode
= vn_phi_compute_hash (vp1
);
2064 slot
= htab_find_slot_with_hash (current_info
->phis
, vp1
, vp1
->hashcode
,
2067 /* Because we iterate over phi operations more than once, it's
2068 possible the slot might already exist here, hence no assert.  */
2074 /* Print set of components in strongly connected component SCC to OUT. */
2077 print_scc (FILE *out
, VEC (tree
, heap
) *scc
)
2082 fprintf (out
, "SCC consists of: ");
2083 FOR_EACH_VEC_ELT (tree
, scc
, i
, var
)
2085 print_generic_expr (out
, var
, 0);
2088 fprintf (out
, "\n");
2091 /* Set the value number of FROM to TO, return true if it has changed
2095 set_ssa_val_to (tree from
, tree to
)
2100 && TREE_CODE (to
) == SSA_NAME
2101 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to
))
2104 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2105 and invariants. So assert that here. */
2106 gcc_assert (to
!= NULL_TREE
2108 || TREE_CODE (to
) == SSA_NAME
2109 || is_gimple_min_invariant (to
)));
2111 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2113 fprintf (dump_file
, "Setting value number of ");
2114 print_generic_expr (dump_file
, from
, 0);
2115 fprintf (dump_file
, " to ");
2116 print_generic_expr (dump_file
, to
, 0);
2119 currval
= SSA_VAL (from
);
2121 if (currval
!= to
&& !operand_equal_p (currval
, to
, OEP_PURE_SAME
))
2123 VN_INFO (from
)->valnum
= to
;
2124 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2125 fprintf (dump_file
, " (changed)\n");
2128 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2129 fprintf (dump_file
, "\n");
2133 /* Set all definitions in STMT to value number to themselves.
2134 Return true if a value number changed. */
2137 defs_to_varying (gimple stmt
)
2139 bool changed
= false;
2143 FOR_EACH_SSA_DEF_OPERAND (defp
, stmt
, iter
, SSA_OP_ALL_DEFS
)
2145 tree def
= DEF_FROM_PTR (defp
);
2147 VN_INFO (def
)->use_processed
= true;
2148 changed
|= set_ssa_val_to (def
, def
);
2153 static bool expr_has_constants (tree expr
);
2154 static tree
valueize_expr (tree expr
);
2156 /* Visit a copy between LHS and RHS, return true if the value number
2160 visit_copy (tree lhs
, tree rhs
)
2162 /* Follow chains of copies to their destination. */
2163 while (TREE_CODE (rhs
) == SSA_NAME
2164 && SSA_VAL (rhs
) != rhs
)
2165 rhs
= SSA_VAL (rhs
);
2167 /* The copy may have a more interesting constant filled expression
2168 (we don't, since we know our RHS is just an SSA name). */
2169 if (TREE_CODE (rhs
) == SSA_NAME
)
2171 VN_INFO (lhs
)->has_constants
= VN_INFO (rhs
)->has_constants
;
2172 VN_INFO (lhs
)->expr
= VN_INFO (rhs
)->expr
;
2175 return set_ssa_val_to (lhs
, rhs
);
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}
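/* As an illustration of the type-punning handling above (a made-up
   union, for exposition only): for

     union { int i; float f; } u;
     u.i = x_1;
     ... = u.f;

   the load finds the earlier store of x_1 at the same offset and size
   but with a mismatching type, so the value used for the lhs is
   VIEW_CONVERT_EXPR <float> (x_1), possibly via a new SSA_NAME that is
   marked as needing insertion.  */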
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
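/* For example (made-up memory SSA versions): in

     # .MEM_3 = VDEF <.MEM_2>
     a.x = 5;
     ...
     # .MEM_5 = VDEF <.MEM_4>
     a.x = 5;

   if the lookup with vuse .MEM_4 finds that a.x already contains 5,
   the second store changes nothing, so .MEM_5 is value numbered to
   SSA_VAL (.MEM_4) rather than to itself.  */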
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        sameval = def;
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
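/* For example (made-up SSA names): in

     x_4 = PHI <a_2(3), a_2(4), b_5(5)>

   with SSA_VAL (b_5) == a_2 all incoming arguments value number to a_2,
   so x_4 is handled as a copy of a_2.  If the arguments differ, a PHI
   node in the same block whose canonicalized arguments match supplies
   the value number through vn_phi_lookup instead.  */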
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      break;
    }
  return false;
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
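/* For example (made-up SSA names): if a_2 = x_1 + 1 is recorded as the
   constant-containing expression of a_2, then for b_3 = a_2 + 2 the
   folding above sees (x_1 + 1) + 2 and simplifies it to x_1 + 3, so
   b_3 shares a value number with any other name computing x_1 + 3.  */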
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                    case GIMPLE_BINARY_RHS:
                    case GIMPLE_TERNARY_RHS:
                      changed = visit_nary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                          break;
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ???  We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ???  We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  VEC_qsort (tree, scc, compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
                                   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
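/* As an illustration of the optimistic iteration above (a made-up loop,
   for exposition only): for the cycle

     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;

   the first pass values i_1 to 0 (the i_2 argument is still VN_TOP and
   is ignored) and then folds i_2 = i_1 + 0 to 0 as well; the second
   pass changes nothing, so the SCC has converged with both names value
   numbered to 0, and the entries built in the optimistic tables are
   copied into the valid tables.  */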
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                       VN_INFO (name)->low);
        }

      usep = op_iter_next_use (&iter);
    }
}
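/* For example (made-up SSA names): for

     x_1 = PHI <0(2), y_2(4)>
     y_2 = x_1 + 1;
     z_3 = y_2 * 2;

   DFS (z_3) recurses into y_2 and then x_1; within the cycle only its
   root keeps low == dfsnum, so {x_1, y_2} is popped and processed as
   one SCC before the singleton SCC {z_3}, which is the topological
   order the SCC walk relies on.  */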
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);
  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                         num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);
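  /* For example (hypothetical block indices, for exposition only): if
     rpo_numbers_temp is {2, 3, 5}, i.e. blocks 2, 3 and 5 come first,
     second and third in RPO, the loop above yields rpo_numbers[2] == 0,
     rpo_numbers[3] == 1 and rpo_numbers[5] == 2, the bb-index to
     RPO-position mapping that compare_ops uses.  */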
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;
  bool changed = true;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))