/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
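
/* For illustration only (hypothetical names, not the verbatim driver):
   a sketch of how alternative 2 above iterates a single SCC to a
   fixed point.  The optimistic tables are cleared between iterations
   while the SSA name -> value number mapping is kept:

       current_info = optimistic_info;
       do
	 {
	   changed = false;
	   htab_empty (optimistic_info->nary);
	   htab_empty (optimistic_info->phis);
	   htab_empty (optimistic_info->references);
	   for each NAME in the SCC
	     changed |= revisit the definition of NAME;
	 }
       while (changed);
       current_info = valid_info;

   Names defined outside the SCC already have final value numbers at
   this point, because SCCs are popped off the stack in dependence
   order.  */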
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
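
/* Illustrative helper, not used by the pass itself: map NAME to the
   invariant it is known to equal, if any, by looking through the
   lattice via SSA_VAL.  A minimal sketch of the intended use of the
   macro above.  */

static inline tree
vn_known_invariant (tree name)
{
  tree val = SSA_VAL (name);
  /* A value number is either VN_TOP, an SSA name, or an invariant;
     only the last case is a usable constant.  */
  if (val != VN_TOP
      && TREE_CODE (val) != SSA_NAME
      && is_gimple_min_invariant (val))
    return val;
  return NULL_TREE;
}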
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
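
/* For illustration: looking up the same constant twice yields one id,
   e.g. in a sketch

     unsigned int id1 = get_or_alloc_constant_value_id (integer_zero_node);
     unsigned int id2 = get_or_alloc_constant_value_id (integer_zero_node);
     gcc_assert (id1 == id2 && value_id_constant_p (id1));

   Constants are compared together with their type via
   vn_constant_eq_with_type, so equal values of incompatible types
   still receive distinct ids.  */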
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	}
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
	 || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = double_int_add (tree_to_double_int (this_offset),
					double_int_rshift
					  (tree_to_double_int (bit_offset),
					   BITS_PER_UNIT == 8
					   ? 3 : exact_log2 (BITS_PER_UNIT),
					   HOST_BITS_PER_DOUBLE_INT, true));
		    if (double_int_fits_in_shwi_p (off))
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off = double_int_add (off,
				    double_int_neg
				      (tree_to_double_int (temp.op1)));
	      off = double_int_mul (off, tree_to_double_int (temp.op2));
	      if (double_int_fits_in_shwi_p (off))
		temp.off = off.low;
	    }
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      ref = NULL_TREE;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && VEC_index (vn_reference_op_s,
			orig, i - 1)->opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && VEC_index (vn_reference_op_s,
			     orig, i - 1)->opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off = double_int_add (off,
				double_int_neg
				  (tree_to_double_int (vro->op1)));
	  off = double_int_mul (off, tree_to_double_int (vro->op2));
	  if (double_int_fits_in_shwi_p (off))
	    vro->off = off.low;
	}
    }

  return orig;
}
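
/* For illustration: if the ops describe MEM[p_2] and SSA_VAL (p_2)
   is &a (names hypothetical), the loop above first substitutes &a
   for p_2 and then folds the *& pair via vn_reference_fold_indirect,
   so the lookup behaves as if the access had been written directly
   in terms of a.  Loads through propagated addresses therefore hash
   and compare equal to the corresponding direct loads.  */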
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
						  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
	  && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
					       vr->operands, i),
				    VEC_index (vn_reference_op_s, lhs_ops, j)))
	{
	  i--;
	  j--;
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 4) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TYPE_MAIN_VARIANT (TREE_TYPE (rhs));
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
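
/* For illustration: because the operands of a commutative operation
   are canonically ordered before hashing, a_1 + b_2 and b_2 + a_1
   (names hypothetical) compute the same hashcode and compare equal
   via vn_nary_op_eq below, so both expressions receive the same
   value number.  */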
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree op0,
			     tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
      /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->length = gimple_num_ops (stmt) - 1;
  vno->type = gimple_expr_type (stmt);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = gimple_op (stmt, i + 1);
  if (vno->opcode == REALPART_EXPR
      || vno->opcode == IMAGPART_EXPR
      || vno->opcode == VIEW_CONVERT_EXPR)
    vno->op[0] = TREE_OPERAND (vno->op[0], 0);
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_op (&vno1, op);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_stmt (&vno1, stmt);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}
/* Return the size of a vn_nary_op_t with LENGTH operands.  */

static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
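
/* For illustration: struct vn_nary_op_s reserves room for four
   operand slots, so sizeof_vn_nary_op trims the unused tail.  E.g. a
   binary operation allocated above occupies
   sizeof (struct vn_nary_op_s) - 2 * sizeof (tree) bytes on the
   obstack rather than the full four-operand size.  */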
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)
{
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  FOR_EACH_VEC_ELT (tree, scc, i, var)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from, 0);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to, 0);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
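/* For example, for the copy chain

     b_2 = a_1;
     c_3 = b_2;

   visiting c_3 follows SSA_VAL (b_2) until it reaches a name that is
   its own value number (here a_1), so all members of a copy chain
   share one value number and the cached expression/constant bits.  */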
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}
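/* For illustration of the type-punning path above (a sketch, not
   from the original sources): given

     union { int i; float f; } u;
     u.i = 0;
     tem_2 = u.f;

   the lookup of u.f finds the store to the same offset and size but
   with type int, so tem_2 is value numbered to
   VIEW_CONVERT_EXPR <float>(0), either to an existing expression or
   to a newly created, to-be-inserted SSA name carrying it.  */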
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address had
     the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value,"
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
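/* For example, in

     # .MEM_3 = VDEF <.MEM_1>
     a.x = 5;
     # .MEM_4 = VDEF <.MEM_3>
     a.x = 5;

   the second store looks up a.x in the pre-store memory state
   .MEM_3, finds the same value 5, and value numbers .MEM_4 to
   SSA_VAL (.MEM_3).  The equal memory states are what later allows
   elimination to treat the second store as redundant.  */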
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        {
          sameval = def;
        }
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
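/* For example, x_4 = PHI <7(2), 7(3)> has all arguments value
   numbered to 7, so allsame holds and x_4 becomes 7 directly.  If
   one argument is still VN_TOP because its back edge has not been
   processed yet, it is ignored optimistically; should a later SCC
   iteration compute a different value for it, visit_phi runs again
   and the result is corrected.  */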
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
      && host_integerp (op1, 1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
                                    TREE_OPERAND (op0, 0),
                                    TREE_INT_CST_LOW (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
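/* For example, for

     a_1 = x_2 + 1;
     b_3 = a_1 + 2;

   a_1 has_constants, so its expression is substituted and
   fold_binary reduces (x_2 + 1) + 2 to x_2 + 3.  Operands without
   constants are only replaced by their value numbers, never by
   their expressions, which is what prevents unbounded growth like
   expanding (a + b) + (a + c) into ever larger trees.  */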
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
/* Valueize NAME if it is an SSA name, otherwise just return it.  */

static tree
vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      tree tem = SSA_VAL (name);
      return tem == VN_TOP ? name : tem;
    }
  return name;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
  if (tem)
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_reference:
      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valueizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                    case GIMPLE_BINARY_RHS:
                    case GIMPLE_TERNARY_RHS:
                      changed = visit_nary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ???  We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ???  We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (!gimple_call_internal_p (stmt)
                  && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  VEC_qsort (tree, scc, compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI to the hash INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}
/* Insert the no longer used reference OREF to the hash INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
                                   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
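/* An example run (illustrative): for the cycle

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 0;

   the first optimistic iteration treats the back-edge value as
   VN_TOP, so i_1 is assumed to be 0 and i_2 folds to 0 as well; the
   second iteration changes nothing, the loop above exits, and the
   optimistic entries are copied into the valid table.  A pessimistic
   scheme could not discover that i_1 == i_2 == 0 here.  */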
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
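/* An example trace: with uses a_1 -> b_2 and b_2 -> a_1 (a two-name
   cycle), DFS gives a_1 dfsnum 1 and b_2 dfsnum 2; the edge back to
   a_1, which is still on the stack, lowers b_2's low to 1, and
   unwinding propagates that into a_1.  There low == dfsnum holds, so
   a_1 is the SCC root and extract_and_process_scc_for_name pops
   {b_2, a_1} off sccstack as one component.  */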
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
/* Initialize the SCC value numbering structures and lookup tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
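/* For example, if pre_and_rev_post_order_compute returned the block
   order bb4, bb2, bb3, then rpo_numbers_temp is {4, 2, 3} and the
   inversion above yields rpo_numbers[4] = 0, rpo_numbers[2] = 1 and
   rpo_numbers[3] = 2, i.e. a map from block index to RPO position
   as consumed by compare_ops when sorting SCC members.  */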
/* Free the SCC value numbering structures.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}