/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
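
/* An illustrative sketch (not part of the original comment): in the loop

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 1

   i_1 and i_2 form a cycle in the SSA graph.  The SCC walk iterates
   value numbering over just these two names, using the optimistic
   table, until their value numbers stop changing; names outside the
   cycle are visited exactly once in DFS order.  */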

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
          || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
          || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
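
/* For example (hypothetical GIMPLE, for illustration only): if x_1 is
   value numbered to y_2, and y_2 is defined by y_2 = a_3 + 1, then
   vn_get_expr_for (x_1) builds, caches and returns the expression
   a_3 + 1.  */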

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }

  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;

  result += iterative_hash_expr (vro1->op0, vro1->opcode);
  result += iterative_hash_expr (vro1->op1, vro1->opcode);
  result += iterative_hash_expr (vro1->op2, vro1->opcode);

  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_OPERAND (ref, 2) == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
            {
              /* Record field as operand.  */
              temp.op0 = TREE_OPERAND (ref, 1);
              temp.op1 = TREE_OPERAND (ref, 2);
            }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
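
/* As an illustration (not part of the original sources), the reference
   a.b[i_1] decomposes into vn_reference_op_s entries outermost first:

     { opcode: ARRAY_REF,     op0: i_1 }
     { opcode: COMPONENT_REF, op0: FIELD_DECL b }
     { opcode: VAR_DECL,      op0: a }

   so two references get equal operand vectors exactly when they denote
   the same access path.  */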

/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ???  There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            vn_reference_fold_indirect (&orig, &i);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
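
/* E.g. (illustration only): if p_1 was value numbered to &a, the pair
   { INDIRECT_REF } / { SSA_NAME p_1 } valueizes to *&a, which
   vn_reference_fold_indirect collapses to the operands of a itself, so
   *p_1 and a end up with identical reference operands.  */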

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;

  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   vr->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     vr->hashcode, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   vr->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     vr->hashcode, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      tree ref = get_ref_from_reference_ops (operands);
      if (!ref)
        return NULL_TREE;
      *vnresult =
        (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
                                                vn_reference_lookup_2, &vr1);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_ref (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
                                                vn_reference_lookup_2, &vr1);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
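
/* E.g. (illustrative): a_1 + b_2 and b_2 + a_1 are canonicalized to the
   same operand order by the swap above, receive the same hash, and are
   therefore unified in the nary hashtable.  */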

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length == 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}

static VEC(tree, heap) *shared_lookup_phiargs;
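
/* E.g. (illustrative): two PHIs in one block,
   x_1 = PHI <a_2, b_3> and y_4 = PHI <a_2, b_3>, hash and compare
   equal once their arguments are canonicalized to value numbers, so
   the second PHI result is value numbered to the first.  */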

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result && may_insert)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, gimple_vuse (stmt));
    }

  return changed;
}

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered
     to those vuse versions, since they represent the same memory
     state after this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
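
  /* E.g. (illustrative): for a second store *p_1 = x_2 whose lookup
     with the pre-store vuse yields x_2 again, the store is redundant
     and its vdef is given the value number of its vuse; both memory
     states are then considered the same.  */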

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        sameval = def;
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
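
  /* E.g. (illustrative): with x_1 known to have the constant
     expression 4, for y_2 = x_1 + 1 the rhs valueizes to 4 + 1 and
     folds to 5; an rhs such as a_3 + b_4 without constants is left
     alone.  */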

  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}

/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }

          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
2234 && is_gimple_min_invariant (simplified
)
2235 && TREE_CODE (lhs
) == SSA_NAME
)
2237 VN_INFO (lhs
)->expr
= simplified
;
2238 VN_INFO (lhs
)->has_constants
= true;
2239 changed
= set_ssa_val_to (lhs
, simplified
);
2243 && TREE_CODE (simplified
) == SSA_NAME
2244 && TREE_CODE (lhs
) == SSA_NAME
)
2246 changed
= visit_copy (lhs
, simplified
);
2249 else if (simplified
)
2251 if (TREE_CODE (lhs
) == SSA_NAME
)
2253 VN_INFO (lhs
)->has_constants
= expr_has_constants (simplified
);
2254 /* We have to unshare the expression or else
2255 valuizing may change the IL stream. */
2256 VN_INFO (lhs
)->expr
= unshare_expr (simplified
);
2259 else if (stmt_has_constants (stmt
)
2260 && TREE_CODE (lhs
) == SSA_NAME
)
2261 VN_INFO (lhs
)->has_constants
= true;
2262 else if (TREE_CODE (lhs
) == SSA_NAME
)
2264 /* We reset expr and constantness here because we may
2265 have been value numbering optimistically, and
2266 iterating. They may become non-constant in this case,
2267 even if they were optimistically constant. */
2269 VN_INFO (lhs
)->has_constants
= false;
2270 VN_INFO (lhs
)->expr
= NULL_TREE
;
2273 if ((TREE_CODE (lhs
) == SSA_NAME
2274 /* We can substitute SSA_NAMEs that are live over
2275 abnormal edges with their constant value. */
2276 && !(gimple_assign_copy_p (stmt
)
2277 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt
)))
2279 && is_gimple_min_invariant (simplified
))
2280 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
2281 /* Stores or copies from SSA_NAMEs that are live over
2282 abnormal edges are a problem. */
2283 || (gimple_assign_single_p (stmt
)
2284 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
2285 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt
))))
2286 changed
= defs_to_varying (stmt
);
2287 else if (REFERENCE_CLASS_P (lhs
) || DECL_P (lhs
))
2289 changed
= visit_reference_op_store (lhs
, gimple_assign_rhs1 (stmt
), stmt
);
2291 else if (TREE_CODE (lhs
) == SSA_NAME
)
2293 if ((gimple_assign_copy_p (stmt
)
2294 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt
)))
2296 && is_gimple_min_invariant (simplified
)))
2298 VN_INFO (lhs
)->has_constants
= true;
2300 changed
= set_ssa_val_to (lhs
, simplified
);
2302 changed
= set_ssa_val_to (lhs
, gimple_assign_rhs1 (stmt
));
2306 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
)))
2308 case GIMPLE_UNARY_RHS
:
2309 changed
= visit_unary_op (lhs
, stmt
);
2311 case GIMPLE_BINARY_RHS
:
2312 changed
= visit_binary_op (lhs
, stmt
);
2314 case GIMPLE_SINGLE_RHS
:
2315 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
)))
2318 /* VOP-less references can go through unary case. */
2319 if ((gimple_assign_rhs_code (stmt
) == REALPART_EXPR
2320 || gimple_assign_rhs_code (stmt
) == IMAGPART_EXPR
2321 || gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)
2322 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0)) == SSA_NAME
)
2324 changed
= visit_unary_op (lhs
, stmt
);
2328 case tcc_declaration
:
2329 changed
= visit_reference_op_load
2330 (lhs
, gimple_assign_rhs1 (stmt
), stmt
);
2332 case tcc_expression
:
2333 if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
2335 changed
= visit_unary_op (lhs
, stmt
);
2340 changed
= defs_to_varying (stmt
);
2344 changed
= defs_to_varying (stmt
);
2350 changed
= defs_to_varying (stmt
);
2352 else if (is_gimple_call (stmt
))
2354 tree lhs
= gimple_call_lhs (stmt
);
2356 /* ??? We could try to simplify calls. */
2358 if (stmt_has_constants (stmt
)
2359 && TREE_CODE (lhs
) == SSA_NAME
)
2360 VN_INFO (lhs
)->has_constants
= true;
2361 else if (TREE_CODE (lhs
) == SSA_NAME
)
2363 /* We reset expr and constantness here because we may
2364 have been value numbering optimistically, and
2365 iterating. They may become non-constant in this case,
2366 even if they were optimistically constant. */
2367 VN_INFO (lhs
)->has_constants
= false;
2368 VN_INFO (lhs
)->expr
= NULL_TREE
;
2371 if (TREE_CODE (lhs
) == SSA_NAME
2372 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
2373 changed
= defs_to_varying (stmt
);
2374 /* ??? We should handle stores from calls. */
2375 else if (TREE_CODE (lhs
) == SSA_NAME
)
2377 if (gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
))
2378 changed
= visit_reference_op_call (lhs
, stmt
);
2380 changed
= defs_to_varying (stmt
);
2383 changed
= defs_to_varying (stmt
);
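/* Note: set_ssa_val_to only reports a change when the recorded value
   for a name actually differs from what was stored before, which is
   what lets process_scc below iterate an SCC until the value numbers
   reach a fixed point.  */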
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return 0;
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return 0;
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      return gimple_uid (opstmta) - gimple_uid (opstmtb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}
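/* Visiting the members in RPO order means that a member's operands
   defined earlier in the component have usually been visited already,
   so the fixed-point iteration in process_scc tends to converge in
   fewer passes.  */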
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */

  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
         changing.  */
      current_info = optimistic_info;
      while (changed)
        {
          changed = false;
          iterations++;
          /* As we are value-numbering optimistically we have to
             clear the expression tables and the simplified expressions
             in each iteration until we converge.  */
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
          obstack_free (&optimistic_info->nary_obstack, NULL);
          gcc_obstack_init (&optimistic_info->nary_obstack);
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            VN_INFO (var)->expr = NULL_TREE;
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        visit_use (var);
    }
}
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
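/* Note: this is Tarjan's SCC algorithm, made iterative by saving the
   current name and operand-iterator state on explicit vectors (namevec
   and itervec) instead of using C recursion, presumably to avoid
   overflowing the host stack on very deep SSA use-def chains.  The
   low/dfsnum bookkeeping above is the standard low-link invariant: a
   name roots an SCC exactly when its low value still equals its own
   DFS number after all its operands have been walked.  */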
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
/* Initialize the SCCVN data structures.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                         num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     VN_TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
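/* All value numbers start out as VN_TOP, the optimistic "undefined"
   lattice value; visit_use and the PHI handling lower them from there,
   which is what makes it sound to ignore not-yet-visited back edge
   arguments during the optimistic iteration.  */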
/* Free the SCCVN data structures.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            may_insert = false;
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
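/* Typical usage, as a sketch: a client pass such as FRE or PRE calls
   run_scc_vn, consults SSA_VAL and the value ids while transforming,
   and calls free_scc_vn when it is done.  */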
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1, lop2;
      for (lop1 = e1, lop2 = e2;
           lop1 || lop2;
           lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
        {
          if (!lop1 || !lop2)
            return false;
          if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
            return false;
        }
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
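/* Note: OEP_PURE_SAME lets two calls to the same pure function with
   equal arguments compare equal, which is what permits value numbering
   redundant pure calls to the same value.  */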
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))