/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
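
/* An illustrative sketch (not part of the original sources) of the
   fixed-point scheme in alternative 2 above.  sketch_visit_use and
   sketch_clear_optimistic_tables are hypothetical stand-ins for the
   real machinery in this file.  */
#if 0
static void
sketch_iterate_scc (tree *scc_members, unsigned int n)
{
  bool changed = true;

  /* Re-visit the members of one SSA-graph cycle until none of their
     value numbers change.  State that must survive an iteration lives
     in the SSA name -> value number map; the optimistic hashtables
     are cleared between iterations.  */
  while (changed)
    {
      unsigned int i;
      changed = false;
      sketch_clear_optimistic_tables ();
      for (i = 0; i < n; ++i)
        changed |= sketch_visit_use (scc_members[i]);
    }
}
#endif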
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
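
/* Illustrative usage (a sketch, not from the original sources):
   consumers such as simplification code can ask for a representative
   expression and fall back to the name itself.  */
#if 0
static void
sketch_dump_expr_for (FILE *out, tree name)
{
  /* Either NAME's constant value number, a cached expression, or an
     expression rebuilt from the defining statement.  */
  tree expr = vn_get_expr_for (name);
  print_generic_expr (out, expr, 0);
  fprintf (out, "\n");
}
#endif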
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
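
/* Illustrative sketch (not part of the original sources): the same
   constant always maps to the same value id, and that id tests as
   constant.  */
#if 0
static void
sketch_constant_value_ids (void)
{
  tree cst = build_int_cst (integer_type_node, 42);
  unsigned int id1 = get_or_alloc_constant_value_id (cst);
  unsigned int id2 = get_or_alloc_constant_value_id (cst);

  /* The second call finds the entry created by the first.  */
  gcc_assert (id1 == id2 && value_id_constant_p (id1));
}
#endif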
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_udiv
                                          (tree_to_double_int (bit_offset),
                                           uhwi_to_double_int (BITS_PER_UNIT),
                                           TRUNC_DIV_EXPR));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              ref = NULL_TREE;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
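
/* Illustrative sketch (not part of the original sources): dumping the
   operand records produced for a reference tree.  For a.b[i] one
   record per level is produced, with the base object last.  */
#if 0
static void
sketch_dump_reference_ops (tree ref)
{
  VEC (vn_reference_op_s, heap) *ops = NULL;
  vn_reference_op_t op;
  unsigned i;

  copy_reference_ops_from_ref (ref, &ops);
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    fprintf (stderr, "op %u: %s\n", i, tree_code_name[op->opcode]);
  VEC_free (vn_reference_op_s, heap, ops);
}
#endif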
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
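
/* Worked example (a sketch, not from the original sources): for a read
   like "abc"[1] the valueized operand vector is { ARRAY_REF with
   constant index 1, STRING_CST }, which the STRING_CST arm above folds
   to the character constant 'b' without touching memory.  */
#if 0
static tree
sketch_fold_constant_string_read (tree char_read_from_string_cst)
{
  /* maywalk is false: the fully-constant folding happens before any
     alias walk in vn_reference_lookup below.  */
  return vn_reference_lookup (char_read_from_string_cst, NULL_TREE,
                              false, NULL);
}
#endif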
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      ao_ref ref1;
      VEC (vn_reference_op_s, heap) *operands = NULL;
      bool res = true;
      copy_reference_ops_from_ref (lhs, &operands);
      operands = valueize_refs (operands);
      if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
                                         TREE_TYPE (lhs), operands))
        res = refs_may_alias_p_1 (ref, &ref1, true);
      VEC_free (vn_reference_op_s, heap, operands);
      if (!res)
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
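
/* Return-value protocol of the two callbacks above, as the code uses
   it: NULL means "keep walking to the next dominating vuse" (possibly
   with *REF / VR_ rewritten through an aggregate copy), a value of
   (void *)-1 means "bail out and stop walking", and anything else
   terminates the walk with that result.  */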
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
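
/* Illustrative usage (a sketch, not from the original sources): the
   typical load-elimination query a pass built on SCCVN performs; the
   real caller in this file is visit_reference_op_load.  */
#if 0
static tree
sketch_lookup_load (gimple load_stmt)
{
  /* Look up the loaded reference under the load's vuse, allowing the
     alias-oracle walk; returns a prior value or NULL_TREE.  */
  return vn_reference_lookup (gimple_assign_rhs1 (load_stmt),
                              gimple_vuse (load_stmt), true, NULL);
}
#endif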
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VBO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
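
/* Consequence of the canonicalization above (illustrative sketch, not
   from the original sources): commutative operands are swapped into a
   canonical order before hashing, so a + b and b + a hash and compare
   equal.  */
#if 0
static tree
sketch_lookup_commuted_plus (tree a, tree b)
{
  /* Finds an entry recorded for a + b even though the operands are
     given in the opposite order here.  */
  return vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (a),
                                   b, a, NULL_TREE, NULL_TREE, NULL);
}
#endif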
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
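
/* Illustrative usage (a sketch, not from the original sources):
   recording that x + 1 was computed into RESULT, the way a PRE-style
   consumer of this API would.  */
#if 0
static void
sketch_record_plus_one (tree x, tree result, unsigned int value_id)
{
  vn_nary_op_insert_pieces (2, PLUS_EXPR, TREE_TYPE (x),
                            x, build_int_cst (TREE_TYPE (x), 1),
                            NULL_TREE, NULL_TREE, result, value_id);
}
#endif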
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  FOR_EACH_VEC_ELT (tree, scc, i, var)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}
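
/* A sketch of the type-punning path above (illustrative only): when a
   union member is stored as an int but read back as a float, the
   offset/size based lookup finds the int value, and the type mismatch
   is patched up by value numbering the load like
   VIEW_CONVERT_EXPR <float> (value), possibly inserting a fresh SSA
   name for that expression as done above.  */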
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
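
/* For instance (illustrative), given
     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
   if looking up *p_1 in the .MEM_2 state already yields the value of
   x_4, the store changes nothing and .MEM_3 is value numbered to
   SSA_VAL (.MEM_2) rather than to itself.  */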
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        {
          sameval = def;
        }
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified. */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
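
/* An illustrative example of the constant combining done above:
     a_1 = x_2 + 1;
     b_3 = a_1 + 2;
   When b_3 is visited, a_1 has_constants, so its expression x_2 + 1
   is substituted and fold_binary turns (x_2 + 1) + 2 into x_2 + 3.  */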
/* Simplify the unary expression RHS, and return the result if
   simplified. */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;

      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it. */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed. */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies. Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating. They may become non-constant in this case,
                 even if they were optimistically constant. */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            {
              changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
            }
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                      changed = visit_unary_op (lhs, stmt);
                      break;
                    case GIMPLE_BINARY_RHS:
                      changed = visit_binary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ??? We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating. They may become non-constant in this case,
                 even if they were optimistically constant. */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ??? We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = (sizeof (struct vn_nary_op_s)
                 - sizeof (tree) * (4 - onary->length));
  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
  void **slot;
  memcpy (nary, onary, size);
  slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = nary;
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
                                   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
      return;
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
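
/* As an illustrative example of the iteration above, for the cycle
     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0;
   the first optimistic pass values i_1 to 0 (the VN_TOP argument is
   ignored) and then i_2 to 0; the second pass changes nothing, so the
   SCC converges with both names valued to the constant 0.  */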
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
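
/* The walk above is Tarjan's SCC algorithm with an explicit stack.
   For instance (illustrative), for
     x_1 = PHI <a_2(entry), y_3(latch)>
     y_3 = x_1 + 1;
   the DFS from x_1 reaches y_3, whose use of x_1 is still on the
   stack; x_1's low-link then equals its own dfsnum, so x_1 and y_3
   are popped and processed together as a single SCC.  */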
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
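
  /* For example (illustrative), if the blocks in RPO order are
     rpo_numbers_temp = { 5, 3, 4 }, the loop above inverts the
     mapping so that rpo_numbers[5] = 0, rpo_numbers[3] = 1 and
     rpo_numbers[4] = 2, i.e. it becomes indexed by block number.  */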
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;
  bool changed = true;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))