/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
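/* As a concrete illustration of the optimistic SCC walk (this example
   is ours, not from the paper; SSA names are made up):

     i_1 = PHI <0 (entry), i_2 (latch)>
     j_1 = PHI <0 (entry), j_2 (latch)>
     i_2 = i_1 + 1;
     j_2 = j_1 + 1;

   While value numbering j's cycle, j_2 is still VN_TOP on the first
   iteration; since phi comparison ignores VN_TOP arguments, j_1
   optimistically matches the entry recorded for i_1, j_1 + 1 then
   matches i_1 + 1, and the next iteration over the SCC confirms the
   assumption rather than changing it, so i and j end up with equal
   value numbers.  Straight-line code needs no such iteration.  */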
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;
/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
          || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
          || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}
/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }

  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
  return vro1->opcode == vro2->opcode
    && vro1->type == vro2->type
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  return iterative_hash_expr (vro1->op0, vro1->opcode)
    + iterative_hash_expr (vro1->op1, vro1->opcode)
    + iterative_hash_expr (vro1->op2, vro1->opcode);
}

/* Return the hashcode for a given reference operation P1.  */

hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  tree v;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    {
      if (VEC_index (tree, vr2->vuses, i) != v)
        return false;
    }

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                               vro))
        return false;
    }
  return true;
}
/* Place the vuses from STMT into *result.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return;

  VEC_reserve_exact (tree, gc, *result,
                     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}
/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}
/* Place the vdefs from STMT into *result.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!stmt)
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}
/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}
/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
            {
              /* Record field as operand.  */
              temp.op0 = TREE_OPERAND (ref, 1);
              temp.op1 = TREE_OPERAND (ref, 2);
            }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }

      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      /* TODO: Do we want to valueize op2 and op1 of
         ARRAY_REF/COMPONENT_REF for Ada */
    }

  return orig;
}
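/* For illustration (hypothetical SSA names): if i_1 was value numbered
   to the constant 4, the record { opcode: ARRAY_REF, op0: i_1 } built
   for a[i_1] is rewritten in place to { opcode: ARRAY_REF, op0: 4 },
   so a[i_1] and a[4] hash and compare as the same reference.  A record
   that was a bare SSA_NAME additionally has its opcode updated to the
   constant's tree code, as done above.  */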
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
        {
          made_replacement = true;
          VEC_replace (tree, orig, i, SSA_VAL (vuse));
        }
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}
/* Return the single reference statement defining all virtual uses
   in VUSES or NULL_TREE, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
         virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
        {
          def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
          goto cont;
        }
      else
        return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
        return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
cont:
  while (def_stmt
         && is_gimple_assign (def_stmt)
         && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;

  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   vr->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     vr->hashcode, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
          && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
          && is_gimple_assign (def_stmt))
        {
          /* We are now at an aliasing definition for the vuses we want to
             look up.  Re-do the lookup with the vdefs for this stmt.  */
          vdefs_to_vec (def_stmt, &vuses);
          vr1.vuses = valueize_vuses (vuses);
          vr1.hashcode = vn_reference_compute_hash (&vr1);
          result = vn_reference_lookup_1 (&vr1, vnresult);
        }
    }

  return result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
                     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
         look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
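/* For illustration (hypothetical SSA names): the canonical operand
   swap above reduces a_1 + b_2 and b_2 + a_1 to the same operand
   order before hashing, so both land in the same bucket and
   vn_nary_op_eq reports them equal; likewise, valueizing the ops
   first makes x_1 + 1 and y_2 + 1 collide once x_1 and y_2 share a
   value number.  */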
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->opcode != vno2->opcode
      || vno1->type != vno2->type)
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;

  result = vp1->block->index;

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      return true;
    }
  return false;
}
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree try_to_simplify (gimple stmt);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
                                     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        {
          tree tem = try_to_simplify (stmt);
          if (tem)
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result && may_insert)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
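  /* For illustration (hypothetical virtual operand names): for

       # SMT.1_4 = VDEF <SMT.1_3>
       *p_1 = x_2;

     we look up "*p_1 in memory state SMT.1_3".  If an earlier store
     recorded the same address and value with the same incoming state,
     the vdef SMT.1_4 is value numbered to SMT.1_3 below, marking the
     store as producing no new memory state.  */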
  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
                                NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
         number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
        {
          tree def = DEF_FROM_PTR (var);
          tree use;

          /* Uh, if the vuse is a multiuse, we can't really do much
             here, sadly, since we don't know which value number of
             which vuse to use.  */
          if (VUSE_VECT_NUM_ELEM (*vv) != 1)
            use = def;
          else
            use = VUSE_ELEMENT_VAR (*vv, 0);

          VN_INFO (def)->use_processed = true;
          changed |= set_ssa_val_to (def, SSA_VAL (use));
        }
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        sameval = def;
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMES in EXPR with their value numbers, and return the
   result.  This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
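/* For illustration (made-up SSA names): given

     x_1 = a_2 + 1;
     y_3 = x_1 + 2;

   x_1 has_constants, so its cached expression a_2 + 1 is substituted
   for op0 above and fold_binary sees (a_2 + 1) + 2, which it folds to
   a_2 + 3.  The valid_gimple_rhs_p check then discards any result
   that is not a valid GIMPLE rhs, which is what keeps expressions
   from growing without bound during SCC iteration.  */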
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary (gimple_assign_rhs_code (stmt),
                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return gimple_assign_rhs1 (stmt);

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   of USE changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }

          simplified = try_to_simplify (stmt);
          if (simplified
              && dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, stmt, 0, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified, 0);
              if (TREE_CODE (lhs) == SSA_NAME)
                fprintf (dump_file, " has constants %d\n",
                         expr_has_constants (simplified));
              else
                fprintf (dump_file, "\n");
            }

          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              /* We can substitute SSA_NAMEs that are live over
                 abnormal edges with their constant value.  */
              && !(gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
              && !(simplified
                   && is_gimple_min_invariant (simplified))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                  {
                  case GIMPLE_UNARY_RHS:
                    changed = visit_unary_op (lhs, stmt);
                    break;
                  case GIMPLE_BINARY_RHS:
                    changed = visit_binary_op (lhs, stmt);
                    break;
                  case GIMPLE_SINGLE_RHS:
                    switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                      {
                      case tcc_declaration:
                      case tcc_reference:
                        changed = visit_reference_op_load
                            (lhs, gimple_assign_rhs1 (stmt), stmt);
                        break;
                      case tcc_expression:
                        if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                          {
                            changed = visit_unary_op (lhs, stmt);
                            break;
                          }
                        /* Fallthrough.  */
                      default:
                        changed = defs_to_varying (stmt);
                        break;
                      }
                    break;
                  default:
                    changed = defs_to_varying (stmt);
                    break;
                  }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ??? We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ??? We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
    }
 done:
  return changed;
}
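/* Dispatch example for visit_use (hypothetical names, illustration
   only): a_1 = b_2 * 4 has RHS class GIMPLE_BINARY_RHS and goes to
   visit_binary_op; i_3 = PHI <i_1, i_2> goes to visit_phi; a load
   a_4 = *p_5 is a GIMPLE_SINGLE_RHS reference and goes to
   visit_reference_op_load; a statement with volatile operands is sent
   straight to defs_to_varying.  */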
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return 0;
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return 0;
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      return gimple_uid (opstmta) - gimple_uid (opstmtb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
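/* The comparator above gives a total order suitable for qsort: names
   with no defining statement (default definitions) sort first, then
   blocks in RPO order, and within one block PHI results come before
   ordinary statements, which compare by gimple UID.  For illustration,
   a default def d_1, a PHI result i_2 and i_3 = i_2 + 1 in i_2's block
   would sort as d_1, i_2, i_3.  */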
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
         changing.  */
      current_info = optimistic_info;
      while (changed)
        {
          changed = false;
          iterations++;
          /* As we are value-numbering optimistically we have to
             clear the expression tables and the simplified expressions
             in each iteration until we converge.  */
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
          obstack_free (&optimistic_info->nary_obstack, NULL);
          gcc_obstack_init (&optimistic_info->nary_obstack);
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            VN_INFO (var)->expr = NULL_TREE;
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        visit_use (var);
    }
}
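/* Convergence sketch for the optimistic iteration above (hypothetical
   SSA names, illustration only).  For the cycle
     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;
   the first pass can ignore the not-yet-visited back edge value, so
   i_1 optimistically gets value 0 and i_2 = 0 + 0 folds to 0 as well.
   The second pass sees PHI <0, 0>, changes nothing, and the loop
   exits; the stable result is then recorded once in the valid
   table.  */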
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
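/* The bail-out above is driven by the sccvn-max-scc-size --param: a
   single SSA cycle with more members than that limit makes the whole
   pass give up (run_scc_vn then reports failure to its callers)
   instead of spending excessive compile time iterating a huge SCC.  */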
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
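/* Lowlink example for the walk above (hypothetical SSA names,
   illustration only).  Suppose a_1 gets dfsnum 1, its operand b_2 gets
   dfsnum 2, and b_2 in turn uses a_1.  When that use is reached, a_1 is
   still on sccstack with the smaller dfsnum, so b_2's low becomes 1;
   returning to a_1 propagates low = 1 upward, a_1 then has
   low == dfsnum, and {a_1, b_2} is popped off the stack as one SCC.  */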
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                         num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);
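  /* For example (illustration only): if the RPO walk visits bb4, bb2,
     bb3 in that order, rpo_numbers_temp is {4, 2, 3} and the inversion
     above yields rpo_numbers[4] == 0, rpo_numbers[2] == 1 and
     rpo_numbers[3] == 2, the bb-index to RPO-position map that
     compare_ops relies on.  */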
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     VN_TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
/* Free the data structures used by SCC value numbering.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          SSA_VAL (def) = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }
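  /* Propagation example (hypothetical names, illustration only): if
     a_1's valnum is b_2 and b_2 is its own value with value id 7, the
     loop above copies 7 into a_1's value id.  A chain a_1 -> b_2 -> c_3
     can need one sweep per link to settle, hence iterating until no
     value id changes.  */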
  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1, lop2;
      for (lop1 = e1, lop2 = e2;
           lop1 || lop2;
           lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
        {
          if (!lop1 || !lop2)
            return false;
          if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
            return false;
        }
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses (VEC (tree,gc) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}

/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses_heap (VEC (tree,heap) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}