2009-01-18 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob78af47ed5ebca88cf654b5331da119783e982ad8
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t nary;
111 htab_t phis;
112 htab_t references;
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
116 } *vn_tables_t;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
123 correct. */
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
134 valid_info. */
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
146 value. */
148 tree VN_TOP;
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
155 detection. */
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name. */
175 vn_ssa_aux_t
176 VN_INFO (tree name)
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
180 gcc_assert (res);
181 return res;
184 /* Set the value numbering info for a given SSA name to a given
185 value. */
187 static inline void
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
197 vn_ssa_aux_t
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
209 return newinfo;
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
216 tree
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
220 gimple def_stmt;
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
224 return name;
226 /* If the value-number is a constant it is the representative
227 expression. */
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
229 return vn->valnum;
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
235 expression. */
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
237 return vn->valnum;
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
241 return vn->expr;
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
247 use it directly. */
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
250 return vn->valnum;
252 if (!is_gimple_assign (def_stmt))
253 return vn->valnum;
255 /* FIXME tuples. This is incomplete and likely will miss some
256 simplifications. */
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
259 case tcc_reference:
260 if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
264 gimple_expr_type (def_stmt),
265 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
266 break;
268 case tcc_unary:
269 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
270 gimple_expr_type (def_stmt),
271 gimple_assign_rhs1 (def_stmt));
272 break;
274 case tcc_binary:
275 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
276 gimple_expr_type (def_stmt),
277 gimple_assign_rhs1 (def_stmt),
278 gimple_assign_rhs2 (def_stmt));
279 break;
281 default:;
283 if (expr == NULL_TREE)
284 return vn->valnum;
286 /* Cache the expression. */
287 vn->expr = expr;
289 return expr;
293 /* Free a phi operation structure VP. */
295 static void
296 free_phi (void *vp)
298 vn_phi_t phi = (vn_phi_t) vp;
299 VEC_free (tree, heap, phi->phiargs);
302 /* Free a reference operation structure VP. */
304 static void
305 free_reference (void *vp)
307 vn_reference_t vr = (vn_reference_t) vp;
308 VEC_free (vn_reference_op_s, heap, vr->operands);
311 /* Hash table equality function for vn_constant_t. */
313 static int
314 vn_constant_eq (const void *p1, const void *p2)
316 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
317 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
319 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
322 /* Hash table hash function for vn_constant_t. */
324 static hashval_t
325 vn_constant_hash (const void *p1)
327 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
328 return vc1->hashcode;
331 /* Lookup a value id for CONSTANT and return it. If it does not
332 exist returns 0. */
334 unsigned int
335 get_constant_value_id (tree constant)
337 void **slot;
338 struct vn_constant_s vc;
340 vc.hashcode = vn_hash_constant_with_type (constant);
341 vc.constant = constant;
342 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
343 vc.hashcode, NO_INSERT);
344 if (slot)
345 return ((vn_constant_t)*slot)->value_id;
346 return 0;
349 /* Lookup a value id for CONSTANT, and if it does not exist, create a
350 new one and return it. If it does exist, return it. */
352 unsigned int
353 get_or_alloc_constant_value_id (tree constant)
355 void **slot;
356 vn_constant_t vc = XNEW (struct vn_constant_s);
358 vc->hashcode = vn_hash_constant_with_type (constant);
359 vc->constant = constant;
360 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
361 vc->hashcode, INSERT);
362 if (*slot)
364 free (vc);
365 return ((vn_constant_t)*slot)->value_id;
367 vc->value_id = get_next_value_id ();
368 *slot = vc;
369 bitmap_set_bit (constant_value_ids, vc->value_id);
370 return vc->value_id;
373 /* Return true if V is a value id for a constant. */
375 bool
376 value_id_constant_p (unsigned int v)
378 return bitmap_bit_p (constant_value_ids, v);
381 /* Compare two reference operands P1 and P2 for equality. Return true if
382 they are equal, and false otherwise. */
384 static int
385 vn_reference_op_eq (const void *p1, const void *p2)
387 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
388 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
389 return vro1->opcode == vro2->opcode
390 && types_compatible_p (vro1->type, vro2->type)
391 && expressions_equal_p (vro1->op0, vro2->op0)
392 && expressions_equal_p (vro1->op1, vro2->op1)
393 && expressions_equal_p (vro1->op2, vro2->op2);
396 /* Compute the hash for a reference operand VRO1. */
398 static hashval_t
399 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
401 return iterative_hash_expr (vro1->op0, vro1->opcode)
402 + iterative_hash_expr (vro1->op1, vro1->opcode)
403 + iterative_hash_expr (vro1->op2, vro1->opcode);
406 /* Return the hashcode for a given reference operation P1. */
408 static hashval_t
409 vn_reference_hash (const void *p1)
411 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
412 return vr1->hashcode;
415 /* Compute a hash for the reference operation VR1 and return it. */
417 hashval_t
418 vn_reference_compute_hash (const vn_reference_t vr1)
420 hashval_t result = 0;
421 tree v;
422 int i;
423 vn_reference_op_t vro;
425 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
426 result += iterative_hash_expr (v, 0);
427 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
428 result += vn_reference_op_compute_hash (vro);
430 return result;
433 /* Return true if reference operations P1 and P2 are equivalent. This
434 means they have the same set of operands and vuses. */
437 vn_reference_eq (const void *p1, const void *p2)
439 tree v;
440 int i;
441 vn_reference_op_t vro;
443 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
444 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
446 if (vr1->vuses == vr2->vuses
447 && vr1->operands == vr2->operands)
448 return true;
450 /* Impossible for them to be equivalent if they have different
451 number of vuses. */
452 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
453 return false;
455 /* We require that address operands be canonicalized in a way that
456 two memory references will have the same operands if they are
457 equivalent. */
458 if (VEC_length (vn_reference_op_s, vr1->operands)
459 != VEC_length (vn_reference_op_s, vr2->operands))
460 return false;
462 /* The memory state is more often different than the address of the
463 store/load, so check it first. */
464 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
466 if (VEC_index (tree, vr2->vuses, i) != v)
467 return false;
470 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
472 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
473 vro))
474 return false;
476 return true;
479 /* Place the vuses from STMT into *result. */
481 static inline void
482 vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
484 ssa_op_iter iter;
485 tree vuse;
487 if (!stmt)
488 return;
490 VEC_reserve_exact (tree, gc, *result,
491 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
493 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
494 VEC_quick_push (tree, *result, vuse);
498 /* Copy the VUSE names in STMT into a vector, and return
499 the vector. */
501 static VEC (tree, gc) *
502 copy_vuses_from_stmt (gimple stmt)
504 VEC (tree, gc) *vuses = NULL;
506 vuses_to_vec (stmt, &vuses);
508 return vuses;
511 /* Place the vdefs from STMT into *result. */
513 static inline void
514 vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
516 ssa_op_iter iter;
517 tree vdef;
519 if (!stmt)
520 return;
522 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
524 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
525 VEC_quick_push (tree, *result, vdef);
528 /* Copy the names of vdef results in STMT into a vector, and return
529 the vector. */
531 static VEC (tree, gc) *
532 copy_vdefs_from_stmt (gimple stmt)
534 VEC (tree, gc) *vdefs = NULL;
536 vdefs_to_vec (stmt, &vdefs);
538 return vdefs;
541 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
542 static VEC (tree, gc) *shared_lookup_vops;
544 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
545 This function will overwrite the current SHARED_LOOKUP_VOPS
546 variable. */
548 VEC (tree, gc) *
549 shared_vuses_from_stmt (gimple stmt)
551 VEC_truncate (tree, shared_lookup_vops, 0);
552 vuses_to_vec (stmt, &shared_lookup_vops);
554 return shared_lookup_vops;
557 /* Copy the operations present in load/store REF into RESULT, a vector of
558 vn_reference_op_s's. */
560 void
561 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
563 if (TREE_CODE (ref) == TARGET_MEM_REF)
565 vn_reference_op_s temp;
567 memset (&temp, 0, sizeof (temp));
568 /* We do not care for spurious type qualifications. */
569 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
570 temp.opcode = TREE_CODE (ref);
571 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
572 temp.op1 = TMR_INDEX (ref);
573 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
575 memset (&temp, 0, sizeof (temp));
576 temp.type = NULL_TREE;
577 temp.opcode = TREE_CODE (ref);
578 temp.op0 = TMR_STEP (ref);
579 temp.op1 = TMR_OFFSET (ref);
580 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
581 return;
584 /* For non-calls, store the information that makes up the address. */
586 while (ref)
588 vn_reference_op_s temp;
590 memset (&temp, 0, sizeof (temp));
591 /* We do not care for spurious type qualifications. */
592 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
593 temp.opcode = TREE_CODE (ref);
595 switch (temp.opcode)
597 case ALIGN_INDIRECT_REF:
598 case INDIRECT_REF:
599 /* The only operand is the address, which gets its own
600 vn_reference_op_s structure. */
601 break;
602 case MISALIGNED_INDIRECT_REF:
603 temp.op0 = TREE_OPERAND (ref, 1);
604 break;
605 case BIT_FIELD_REF:
606 /* Record bits and position. */
607 temp.op0 = TREE_OPERAND (ref, 1);
608 temp.op1 = TREE_OPERAND (ref, 2);
609 break;
610 case COMPONENT_REF:
611 /* The field decl is enough to unambiguously specify the field,
612 a matching type is not necessary and a mismatching type
613 is always a spurious difference. */
614 temp.type = NULL_TREE;
615 /* If this is a reference to a union member, record the union
616 member size as operand. Do so only if we are doing
617 expression insertion (during FRE), as PRE currently gets
618 confused with this. */
619 if (may_insert
620 && TREE_OPERAND (ref, 2) == NULL_TREE
621 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
622 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
623 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
624 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
625 else
627 /* Record field as operand. */
628 temp.op0 = TREE_OPERAND (ref, 1);
629 temp.op1 = TREE_OPERAND (ref, 2);
631 break;
632 case ARRAY_RANGE_REF:
633 case ARRAY_REF:
634 /* Record index as operand. */
635 temp.op0 = TREE_OPERAND (ref, 1);
636 temp.op1 = TREE_OPERAND (ref, 2);
637 temp.op2 = TREE_OPERAND (ref, 3);
638 break;
639 case STRING_CST:
640 case INTEGER_CST:
641 case COMPLEX_CST:
642 case VECTOR_CST:
643 case REAL_CST:
644 case CONSTRUCTOR:
645 case VAR_DECL:
646 case PARM_DECL:
647 case CONST_DECL:
648 case RESULT_DECL:
649 case SSA_NAME:
650 temp.op0 = ref;
651 break;
652 case ADDR_EXPR:
653 if (is_gimple_min_invariant (ref))
655 temp.op0 = ref;
656 break;
658 /* Fallthrough. */
659 /* These are only interesting for their operands, their
660 existence, and their type. They will never be the last
661 ref in the chain of references (IE they require an
662 operand), so we don't have to put anything
663 for op* as it will be handled by the iteration */
664 case IMAGPART_EXPR:
665 case REALPART_EXPR:
666 case VIEW_CONVERT_EXPR:
667 break;
668 default:
669 gcc_unreachable ();
671 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
673 if (REFERENCE_CLASS_P (ref)
674 || (TREE_CODE (ref) == ADDR_EXPR
675 && !is_gimple_min_invariant (ref)))
676 ref = TREE_OPERAND (ref, 0);
677 else
678 ref = NULL_TREE;
682 /* Re-create a reference tree from the reference ops OPS.
683 Returns NULL_TREE if the ops were not handled.
684 This routine needs to be kept in sync with copy_reference_ops_from_ref. */
686 static tree
687 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
689 vn_reference_op_t op;
690 unsigned i;
691 tree ref, *op0_p = &ref;
693 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
695 switch (op->opcode)
697 case CALL_EXPR:
698 return NULL_TREE;
700 case ALIGN_INDIRECT_REF:
701 case INDIRECT_REF:
702 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
703 op0_p = &TREE_OPERAND (*op0_p, 0);
704 break;
706 case MISALIGNED_INDIRECT_REF:
707 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
708 NULL_TREE, op->op0);
709 op0_p = &TREE_OPERAND (*op0_p, 0);
710 break;
712 case BIT_FIELD_REF:
713 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
714 op->op0, op->op1);
715 op0_p = &TREE_OPERAND (*op0_p, 0);
716 break;
718 case COMPONENT_REF:
719 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
720 op->op0, op->op1);
721 op0_p = &TREE_OPERAND (*op0_p, 0);
722 break;
724 case ARRAY_RANGE_REF:
725 case ARRAY_REF:
726 *op0_p = build4 (op->opcode, op->type, NULL_TREE,
727 op->op0, op->op1, op->op2);
728 op0_p = &TREE_OPERAND (*op0_p, 0);
729 break;
731 case STRING_CST:
732 case INTEGER_CST:
733 case COMPLEX_CST:
734 case VECTOR_CST:
735 case REAL_CST:
736 case CONSTRUCTOR:
737 case VAR_DECL:
738 case PARM_DECL:
739 case CONST_DECL:
740 case RESULT_DECL:
741 case SSA_NAME:
742 *op0_p = op->op0;
743 break;
745 case ADDR_EXPR:
746 if (op->op0 != NULL_TREE)
748 gcc_assert (is_gimple_min_invariant (op->op0));
749 *op0_p = op->op0;
750 break;
752 /* Fallthrough. */
753 case IMAGPART_EXPR:
754 case REALPART_EXPR:
755 case VIEW_CONVERT_EXPR:
756 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
757 op0_p = &TREE_OPERAND (*op0_p, 0);
758 break;
760 default:
761 return NULL_TREE;
765 return ref;
768 /* Copy the operations present in load/store/call REF into RESULT, a vector of
769 vn_reference_op_s's. */
771 void
772 copy_reference_ops_from_call (gimple call,
773 VEC(vn_reference_op_s, heap) **result)
775 vn_reference_op_s temp;
776 unsigned i;
778 /* Copy the type, opcode, function being called and static chain. */
779 memset (&temp, 0, sizeof (temp));
780 temp.type = gimple_call_return_type (call);
781 temp.opcode = CALL_EXPR;
782 temp.op0 = gimple_call_fn (call);
783 temp.op1 = gimple_call_chain (call);
784 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
786 /* Copy the call arguments. As they can be references as well,
787 just chain them together. */
788 for (i = 0; i < gimple_call_num_args (call); ++i)
790 tree callarg = gimple_call_arg (call, i);
791 copy_reference_ops_from_ref (callarg, result);
795 /* Create a vector of vn_reference_op_s structures from REF, a
796 REFERENCE_CLASS_P tree. The vector is not shared. */
798 static VEC(vn_reference_op_s, heap) *
799 create_reference_ops_from_ref (tree ref)
801 VEC (vn_reference_op_s, heap) *result = NULL;
803 copy_reference_ops_from_ref (ref, &result);
804 return result;
807 /* Create a vector of vn_reference_op_s structures from CALL, a
808 call statement. The vector is not shared. */
810 static VEC(vn_reference_op_s, heap) *
811 create_reference_ops_from_call (gimple call)
813 VEC (vn_reference_op_s, heap) *result = NULL;
815 copy_reference_ops_from_call (call, &result);
816 return result;
819 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
821 /* Create a vector of vn_reference_op_s structures from REF, a
822 REFERENCE_CLASS_P tree. The vector is shared among all callers of
823 this function. */
825 static VEC(vn_reference_op_s, heap) *
826 shared_reference_ops_from_ref (tree ref)
828 if (!ref)
829 return NULL;
830 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
831 copy_reference_ops_from_ref (ref, &shared_lookup_references);
832 return shared_lookup_references;
835 /* Create a vector of vn_reference_op_s structures from CALL, a
836 call statement. The vector is shared among all callers of
837 this function. */
839 static VEC(vn_reference_op_s, heap) *
840 shared_reference_ops_from_call (gimple call)
842 if (!call)
843 return NULL;
844 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
845 copy_reference_ops_from_call (call, &shared_lookup_references);
846 return shared_lookup_references;
850 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
851 structures into their value numbers. This is done in-place, and
852 the vector passed in is returned. */
854 static VEC (vn_reference_op_s, heap) *
855 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
857 vn_reference_op_t vro;
858 int i;
860 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
862 if (vro->opcode == SSA_NAME
863 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
865 vro->op0 = SSA_VAL (vro->op0);
866 /* If it transforms from an SSA_NAME to a constant, update
867 the opcode. */
868 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
869 vro->opcode = TREE_CODE (vro->op0);
871 /* TODO: Do we want to valueize op2 and op1 of
872 ARRAY_REF/COMPONENT_REF for Ada */
876 return orig;
879 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
880 their value numbers. This is done in-place, and the vector passed
881 in is returned. */
883 static VEC (tree, gc) *
884 valueize_vuses (VEC (tree, gc) *orig)
886 bool made_replacement = false;
887 tree vuse;
888 int i;
890 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
892 if (vuse != SSA_VAL (vuse))
894 made_replacement = true;
895 VEC_replace (tree, orig, i, SSA_VAL (vuse));
899 if (made_replacement && VEC_length (tree, orig) > 1)
900 sort_vuses (orig);
902 return orig;
905 /* Return the single reference statement defining all virtual uses
906 in VUSES or NULL_TREE, if there are multiple defining statements.
907 Take into account only definitions that alias REF if following
908 back-edges. */
910 static gimple
911 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
913 gimple def_stmt;
914 tree vuse;
915 unsigned int i;
917 gcc_assert (VEC_length (tree, vuses) >= 1);
919 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
920 if (gimple_code (def_stmt) == GIMPLE_PHI)
922 /* We can only handle lookups over PHI nodes for a single
923 virtual operand. */
924 if (VEC_length (tree, vuses) == 1)
926 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
927 goto cont;
929 else
930 return NULL;
933 /* Verify each VUSE reaches the same defining stmt. */
934 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
936 gimple tmp = SSA_NAME_DEF_STMT (vuse);
937 if (tmp != def_stmt)
938 return NULL;
941 /* Now see if the definition aliases ref, and loop until it does. */
942 cont:
943 while (def_stmt
944 && is_gimple_assign (def_stmt)
945 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
946 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
948 return def_stmt;
951 /* Lookup a SCCVN reference operation VR in the current hash table.
952 Returns the resulting value number if it exists in the hash table,
953 NULL_TREE otherwise. VNRESULT will be filled in with the actual
954 vn_reference_t stored in the hashtable if something is found. */
956 static tree
957 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
959 void **slot;
960 hashval_t hash;
962 hash = vr->hashcode;
963 slot = htab_find_slot_with_hash (current_info->references, vr,
964 hash, NO_INSERT);
965 if (!slot && current_info == optimistic_info)
966 slot = htab_find_slot_with_hash (valid_info->references, vr,
967 hash, NO_INSERT);
968 if (slot)
970 if (vnresult)
971 *vnresult = (vn_reference_t)*slot;
972 return ((vn_reference_t)*slot)->result;
975 return NULL_TREE;
979 /* Lookup a reference operation by it's parts, in the current hash table.
980 Returns the resulting value number if it exists in the hash table,
981 NULL_TREE otherwise. VNRESULT will be filled in with the actual
982 vn_reference_t stored in the hashtable if something is found. */
984 tree
985 vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
986 VEC (vn_reference_op_s, heap) *operands,
987 vn_reference_t *vnresult, bool maywalk)
989 struct vn_reference_s vr1;
990 tree result;
991 if (vnresult)
992 *vnresult = NULL;
994 vr1.vuses = valueize_vuses (vuses);
995 vr1.operands = valueize_refs (operands);
996 vr1.hashcode = vn_reference_compute_hash (&vr1);
997 result = vn_reference_lookup_1 (&vr1, vnresult);
999 /* If there is a single defining statement for all virtual uses, we can
1000 use that, following virtual use-def chains. */
1001 if (!result
1002 && maywalk
1003 && vr1.vuses
1004 && VEC_length (tree, vr1.vuses) >= 1)
1006 tree ref = get_ref_from_reference_ops (operands);
1007 gimple def_stmt;
1008 if (ref
1009 && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
1010 && is_gimple_assign (def_stmt))
1012 /* We are now at an aliasing definition for the vuses we want to
1013 look up. Re-do the lookup with the vdefs for this stmt. */
1014 vdefs_to_vec (def_stmt, &vuses);
1015 vr1.vuses = valueize_vuses (vuses);
1016 vr1.hashcode = vn_reference_compute_hash (&vr1);
1017 result = vn_reference_lookup_1 (&vr1, vnresult);
1021 return result;
1024 /* Lookup OP in the current hash table, and return the resulting value
1025 number if it exists in the hash table. Return NULL_TREE if it does
1026 not exist in the hash table or if the result field of the structure
1027 was NULL.. VNRESULT will be filled in with the vn_reference_t
1028 stored in the hashtable if one exists. */
1030 tree
1031 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
1032 vn_reference_t *vnresult)
1034 struct vn_reference_s vr1;
1035 tree result;
1036 gimple def_stmt;
1037 if (vnresult)
1038 *vnresult = NULL;
1040 vr1.vuses = valueize_vuses (vuses);
1041 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
1042 vr1.hashcode = vn_reference_compute_hash (&vr1);
1043 result = vn_reference_lookup_1 (&vr1, vnresult);
1045 /* If there is a single defining statement for all virtual uses, we can
1046 use that, following virtual use-def chains. */
1047 if (!result
1048 && maywalk
1049 && vr1.vuses
1050 && VEC_length (tree, vr1.vuses) >= 1
1051 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
1052 && is_gimple_assign (def_stmt))
1054 /* We are now at an aliasing definition for the vuses we want to
1055 look up. Re-do the lookup with the vdefs for this stmt. */
1056 vdefs_to_vec (def_stmt, &vuses);
1057 vr1.vuses = valueize_vuses (vuses);
1058 vr1.hashcode = vn_reference_compute_hash (&vr1);
1059 result = vn_reference_lookup_1 (&vr1, vnresult);
1062 return result;
1066 /* Insert OP into the current hash table with a value number of
1067 RESULT, and return the resulting reference structure we created. */
1069 vn_reference_t
1070 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
1072 void **slot;
1073 vn_reference_t vr1;
1075 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1076 if (TREE_CODE (result) == SSA_NAME)
1077 vr1->value_id = VN_INFO (result)->value_id;
1078 else
1079 vr1->value_id = get_or_alloc_constant_value_id (result);
1080 vr1->vuses = valueize_vuses (vuses);
1081 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1082 vr1->hashcode = vn_reference_compute_hash (vr1);
1083 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1085 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1086 INSERT);
1088 /* Because we lookup stores using vuses, and value number failures
1089 using the vdefs (see visit_reference_op_store for how and why),
1090 it's possible that on failure we may try to insert an already
1091 inserted store. This is not wrong, there is no ssa name for a
1092 store that we could use as a differentiator anyway. Thus, unlike
1093 the other lookup functions, you cannot gcc_assert (!*slot)
1094 here. */
1096 /* But free the old slot in case of a collision. */
1097 if (*slot)
1098 free_reference (*slot);
1100 *slot = vr1;
1101 return vr1;
1104 /* Insert a reference by it's pieces into the current hash table with
1105 a value number of RESULT. Return the resulting reference
1106 structure we created. */
1108 vn_reference_t
1109 vn_reference_insert_pieces (VEC (tree, gc) *vuses,
1110 VEC (vn_reference_op_s, heap) *operands,
1111 tree result, unsigned int value_id)
1114 void **slot;
1115 vn_reference_t vr1;
1117 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1118 vr1->value_id = value_id;
1119 vr1->vuses = valueize_vuses (vuses);
1120 vr1->operands = valueize_refs (operands);
1121 vr1->hashcode = vn_reference_compute_hash (vr1);
1122 if (result && TREE_CODE (result) == SSA_NAME)
1123 result = SSA_VAL (result);
1124 vr1->result = result;
1126 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1127 INSERT);
1129 /* At this point we should have all the things inserted that we have
1130 seen before, and we should never try inserting something that
1131 already exists. */
1132 gcc_assert (!*slot);
1133 if (*slot)
1134 free_reference (*slot);
1136 *slot = vr1;
1137 return vr1;
1140 /* Compute and return the hash value for nary operation VBO1. */
1142 inline hashval_t
1143 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1145 hashval_t hash = 0;
1146 unsigned i;
1148 for (i = 0; i < vno1->length; ++i)
1149 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1150 vno1->op[i] = SSA_VAL (vno1->op[i]);
1152 if (vno1->length == 2
1153 && commutative_tree_code (vno1->opcode)
1154 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1156 tree temp = vno1->op[0];
1157 vno1->op[0] = vno1->op[1];
1158 vno1->op[1] = temp;
1161 for (i = 0; i < vno1->length; ++i)
1162 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1164 return hash;
1167 /* Return the computed hashcode for nary operation P1. */
1169 static hashval_t
1170 vn_nary_op_hash (const void *p1)
1172 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1173 return vno1->hashcode;
1176 /* Compare nary operations P1 and P2 and return true if they are
1177 equivalent. */
1180 vn_nary_op_eq (const void *p1, const void *p2)
1182 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1183 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1184 unsigned i;
1186 if (vno1->opcode != vno2->opcode
1187 || !types_compatible_p (vno1->type, vno2->type))
1188 return false;
1190 for (i = 0; i < vno1->length; ++i)
1191 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1192 return false;
1194 return true;
1197 /* Lookup a n-ary operation by its pieces and return the resulting value
1198 number if it exists in the hash table. Return NULL_TREE if it does
1199 not exist in the hash table or if the result field of the operation
1200 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1201 if it exists. */
1203 tree
1204 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1205 tree type, tree op0, tree op1, tree op2,
1206 tree op3, vn_nary_op_t *vnresult)
1208 void **slot;
1209 struct vn_nary_op_s vno1;
1210 if (vnresult)
1211 *vnresult = NULL;
1212 vno1.opcode = code;
1213 vno1.length = length;
1214 vno1.type = type;
1215 vno1.op[0] = op0;
1216 vno1.op[1] = op1;
1217 vno1.op[2] = op2;
1218 vno1.op[3] = op3;
1219 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1220 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1221 NO_INSERT);
1222 if (!slot && current_info == optimistic_info)
1223 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1224 NO_INSERT);
1225 if (!slot)
1226 return NULL_TREE;
1227 if (vnresult)
1228 *vnresult = (vn_nary_op_t)*slot;
1229 return ((vn_nary_op_t)*slot)->result;
1232 /* Lookup OP in the current hash table, and return the resulting value
1233 number if it exists in the hash table. Return NULL_TREE if it does
1234 not exist in the hash table or if the result field of the operation
1235 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1236 if it exists. */
1238 tree
1239 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1241 void **slot;
1242 struct vn_nary_op_s vno1;
1243 unsigned i;
1245 if (vnresult)
1246 *vnresult = NULL;
1247 vno1.opcode = TREE_CODE (op);
1248 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1249 vno1.type = TREE_TYPE (op);
1250 for (i = 0; i < vno1.length; ++i)
1251 vno1.op[i] = TREE_OPERAND (op, i);
1252 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1253 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1254 NO_INSERT);
1255 if (!slot && current_info == optimistic_info)
1256 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1257 NO_INSERT);
1258 if (!slot)
1259 return NULL_TREE;
1260 if (vnresult)
1261 *vnresult = (vn_nary_op_t)*slot;
1262 return ((vn_nary_op_t)*slot)->result;
1265 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1266 value number if it exists in the hash table. Return NULL_TREE if
1267 it does not exist in the hash table. VNRESULT will contain the
1268 vn_nary_op_t from the hashtable if it exists. */
1270 tree
1271 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1273 void **slot;
1274 struct vn_nary_op_s vno1;
1275 unsigned i;
1277 if (vnresult)
1278 *vnresult = NULL;
1279 vno1.opcode = gimple_assign_rhs_code (stmt);
1280 vno1.length = gimple_num_ops (stmt) - 1;
1281 vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
1282 for (i = 0; i < vno1.length; ++i)
1283 vno1.op[i] = gimple_op (stmt, i + 1);
1284 if (vno1.opcode == REALPART_EXPR
1285 || vno1.opcode == IMAGPART_EXPR
1286 || vno1.opcode == VIEW_CONVERT_EXPR)
1287 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1288 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1289 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1290 NO_INSERT);
1291 if (!slot && current_info == optimistic_info)
1292 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1293 NO_INSERT);
1294 if (!slot)
1295 return NULL_TREE;
1296 if (vnresult)
1297 *vnresult = (vn_nary_op_t)*slot;
1298 return ((vn_nary_op_t)*slot)->result;
1301 /* Insert a n-ary operation into the current hash table using it's
1302 pieces. Return the vn_nary_op_t structure we created and put in
1303 the hashtable. */
1305 vn_nary_op_t
1306 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1307 tree type, tree op0,
1308 tree op1, tree op2, tree op3,
1309 tree result,
1310 unsigned int value_id)
1312 void **slot;
1313 vn_nary_op_t vno1;
1315 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1316 (sizeof (struct vn_nary_op_s)
1317 - sizeof (tree) * (4 - length)));
1318 vno1->value_id = value_id;
1319 vno1->opcode = code;
1320 vno1->length = length;
1321 vno1->type = type;
1322 if (length >= 1)
1323 vno1->op[0] = op0;
1324 if (length >= 2)
1325 vno1->op[1] = op1;
1326 if (length >= 3)
1327 vno1->op[2] = op2;
1328 if (length >= 4)
1329 vno1->op[3] = op3;
1330 vno1->result = result;
1331 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1332 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1333 INSERT);
1334 gcc_assert (!*slot);
1336 *slot = vno1;
1337 return vno1;
1341 /* Insert OP into the current hash table with a value number of
1342 RESULT. Return the vn_nary_op_t structure we created and put in
1343 the hashtable. */
1345 vn_nary_op_t
1346 vn_nary_op_insert (tree op, tree result)
1348 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1349 void **slot;
1350 vn_nary_op_t vno1;
1351 unsigned i;
1353 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1354 (sizeof (struct vn_nary_op_s)
1355 - sizeof (tree) * (4 - length)));
1356 vno1->value_id = VN_INFO (result)->value_id;
1357 vno1->opcode = TREE_CODE (op);
1358 vno1->length = length;
1359 vno1->type = TREE_TYPE (op);
1360 for (i = 0; i < vno1->length; ++i)
1361 vno1->op[i] = TREE_OPERAND (op, i);
1362 vno1->result = result;
1363 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1364 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1365 INSERT);
1366 gcc_assert (!*slot);
1368 *slot = vno1;
1369 return vno1;
1372 /* Insert the rhs of STMT into the current hash table with a value number of
1373 RESULT. */
1375 vn_nary_op_t
1376 vn_nary_op_insert_stmt (gimple stmt, tree result)
1378 unsigned length = gimple_num_ops (stmt) - 1;
1379 void **slot;
1380 vn_nary_op_t vno1;
1381 unsigned i;
1383 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1384 (sizeof (struct vn_nary_op_s)
1385 - sizeof (tree) * (4 - length)));
1386 vno1->value_id = VN_INFO (result)->value_id;
1387 vno1->opcode = gimple_assign_rhs_code (stmt);
1388 vno1->length = length;
1389 vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
1390 for (i = 0; i < vno1->length; ++i)
1391 vno1->op[i] = gimple_op (stmt, i + 1);
1392 if (vno1->opcode == REALPART_EXPR
1393 || vno1->opcode == IMAGPART_EXPR
1394 || vno1->opcode == VIEW_CONVERT_EXPR)
1395 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1396 vno1->result = result;
1397 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1398 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1399 INSERT);
1400 gcc_assert (!*slot);
1402 *slot = vno1;
1403 return vno1;
1406 /* Compute a hashcode for PHI operation VP1 and return it. */
1408 static inline hashval_t
1409 vn_phi_compute_hash (vn_phi_t vp1)
1411 hashval_t result = 0;
1412 int i;
1413 tree phi1op;
1414 tree type;
1416 result = vp1->block->index;
1418 /* If all PHI arguments are constants we need to distinguish
1419 the PHI node via its type. */
1420 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1421 result += (INTEGRAL_TYPE_P (type)
1422 + (INTEGRAL_TYPE_P (type)
1423 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1425 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1427 if (phi1op == VN_TOP)
1428 continue;
1429 result += iterative_hash_expr (phi1op, result);
1432 return result;
1435 /* Return the computed hashcode for phi operation P1. */
1437 static hashval_t
1438 vn_phi_hash (const void *p1)
1440 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1441 return vp1->hashcode;
1444 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1446 static int
1447 vn_phi_eq (const void *p1, const void *p2)
1449 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1450 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1452 if (vp1->block == vp2->block)
1454 int i;
1455 tree phi1op;
1457 /* If the PHI nodes do not have compatible types
1458 they are not the same. */
1459 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1460 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1461 return false;
1463 /* Any phi in the same block will have it's arguments in the
1464 same edge order, because of how we store phi nodes. */
1465 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1467 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1468 if (phi1op == VN_TOP || phi2op == VN_TOP)
1469 continue;
1470 if (!expressions_equal_p (phi1op, phi2op))
1471 return false;
1473 return true;
1475 return false;
1478 static VEC(tree, heap) *shared_lookup_phiargs;
1480 /* Lookup PHI in the current hash table, and return the resulting
1481 value number if it exists in the hash table. Return NULL_TREE if
1482 it does not exist in the hash table. */
1484 tree
1485 vn_phi_lookup (gimple phi)
1487 void **slot;
1488 struct vn_phi_s vp1;
1489 unsigned i;
1491 VEC_truncate (tree, shared_lookup_phiargs, 0);
1493 /* Canonicalize the SSA_NAME's to their value number. */
1494 for (i = 0; i < gimple_phi_num_args (phi); i++)
1496 tree def = PHI_ARG_DEF (phi, i);
1497 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1498 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1500 vp1.phiargs = shared_lookup_phiargs;
1501 vp1.block = gimple_bb (phi);
1502 vp1.hashcode = vn_phi_compute_hash (&vp1);
1503 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1504 NO_INSERT);
1505 if (!slot && current_info == optimistic_info)
1506 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1507 NO_INSERT);
1508 if (!slot)
1509 return NULL_TREE;
1510 return ((vn_phi_t)*slot)->result;
1513 /* Insert PHI into the current hash table with a value number of
1514 RESULT. */
1516 static vn_phi_t
1517 vn_phi_insert (gimple phi, tree result)
1519 void **slot;
1520 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1521 unsigned i;
1522 VEC (tree, heap) *args = NULL;
1524 /* Canonicalize the SSA_NAME's to their value number. */
1525 for (i = 0; i < gimple_phi_num_args (phi); i++)
1527 tree def = PHI_ARG_DEF (phi, i);
1528 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1529 VEC_safe_push (tree, heap, args, def);
1531 vp1->value_id = VN_INFO (result)->value_id;
1532 vp1->phiargs = args;
1533 vp1->block = gimple_bb (phi);
1534 vp1->result = result;
1535 vp1->hashcode = vn_phi_compute_hash (vp1);
1537 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1538 INSERT);
1540 /* Because we iterate over phi operations more than once, it's
1541 possible the slot might already exist here, hence no assert.*/
1542 *slot = vp1;
1543 return vp1;
1547 /* Print set of components in strongly connected component SCC to OUT. */
1549 static void
1550 print_scc (FILE *out, VEC (tree, heap) *scc)
1552 tree var;
1553 unsigned int i;
1555 fprintf (out, "SCC consists of: ");
1556 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1558 print_generic_expr (out, var, 0);
1559 fprintf (out, " ");
1561 fprintf (out, "\n");
1564 /* Set the value number of FROM to TO, return true if it has changed
1565 as a result. */
1567 static inline bool
1568 set_ssa_val_to (tree from, tree to)
1570 tree currval;
1572 if (from != to
1573 && TREE_CODE (to) == SSA_NAME
1574 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1575 to = from;
1577 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1578 and invariants. So assert that here. */
1579 gcc_assert (to != NULL_TREE
1580 && (to == VN_TOP
1581 || TREE_CODE (to) == SSA_NAME
1582 || is_gimple_min_invariant (to)));
1584 if (dump_file && (dump_flags & TDF_DETAILS))
1586 fprintf (dump_file, "Setting value number of ");
1587 print_generic_expr (dump_file, from, 0);
1588 fprintf (dump_file, " to ");
1589 print_generic_expr (dump_file, to, 0);
1592 currval = SSA_VAL (from);
1594 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1596 SSA_VAL (from) = to;
1597 if (dump_file && (dump_flags & TDF_DETAILS))
1598 fprintf (dump_file, " (changed)\n");
1599 return true;
1601 if (dump_file && (dump_flags & TDF_DETAILS))
1602 fprintf (dump_file, "\n");
1603 return false;
1606 /* Set all definitions in STMT to value number to themselves.
1607 Return true if a value number changed. */
1609 static bool
1610 defs_to_varying (gimple stmt)
1612 bool changed = false;
1613 ssa_op_iter iter;
1614 def_operand_p defp;
1616 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1618 tree def = DEF_FROM_PTR (defp);
1620 VN_INFO (def)->use_processed = true;
1621 changed |= set_ssa_val_to (def, def);
1623 return changed;
1626 static bool expr_has_constants (tree expr);
1627 static tree valueize_expr (tree expr);
1629 /* Visit a copy between LHS and RHS, return true if the value number
1630 changed. */
1632 static bool
1633 visit_copy (tree lhs, tree rhs)
1635 /* Follow chains of copies to their destination. */
1636 while (TREE_CODE (rhs) == SSA_NAME
1637 && SSA_VAL (rhs) != rhs)
1638 rhs = SSA_VAL (rhs);
1640 /* The copy may have a more interesting constant filled expression
1641 (we don't, since we know our RHS is just an SSA name). */
1642 if (TREE_CODE (rhs) == SSA_NAME)
1644 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1645 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1648 return set_ssa_val_to (lhs, rhs);
1651 /* Visit a unary operator RHS, value number it, and return true if the
1652 value number of LHS has changed as a result. */
1654 static bool
1655 visit_unary_op (tree lhs, gimple stmt)
1657 bool changed = false;
1658 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1660 if (result)
1662 changed = set_ssa_val_to (lhs, result);
1664 else
1666 changed = set_ssa_val_to (lhs, lhs);
1667 vn_nary_op_insert_stmt (stmt, lhs);
1670 return changed;
1673 /* Visit a binary operator RHS, value number it, and return true if the
1674 value number of LHS has changed as a result. */
1676 static bool
1677 visit_binary_op (tree lhs, gimple stmt)
1679 bool changed = false;
1680 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1682 if (result)
1684 changed = set_ssa_val_to (lhs, result);
1686 else
1688 changed = set_ssa_val_to (lhs, lhs);
1689 vn_nary_op_insert_stmt (stmt, lhs);
1692 return changed;
1695 /* Visit a call STMT storing into LHS. Return true if the value number
1696 of the LHS has changed as a result. */
1698 static bool
1699 visit_reference_op_call (tree lhs, gimple stmt)
1701 bool changed = false;
1702 struct vn_reference_s vr1;
1703 tree result;
1705 vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
1706 vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
1707 vr1.hashcode = vn_reference_compute_hash (&vr1);
1708 result = vn_reference_lookup_1 (&vr1, NULL);
1709 if (result)
1711 changed = set_ssa_val_to (lhs, result);
1712 if (TREE_CODE (result) == SSA_NAME
1713 && VN_INFO (result)->has_constants)
1714 VN_INFO (lhs)->has_constants = true;
1716 else
1718 void **slot;
1719 vn_reference_t vr2;
1720 changed = set_ssa_val_to (lhs, lhs);
1721 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1722 vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
1723 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1724 vr2->hashcode = vr1.hashcode;
1725 vr2->result = lhs;
1726 slot = htab_find_slot_with_hash (current_info->references,
1727 vr2, vr2->hashcode, INSERT);
1728 if (*slot)
1729 free_reference (*slot);
1730 *slot = vr2;
1733 return changed;
1736 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1737 and return true if the value number of the LHS has changed as a result. */
1739 static bool
1740 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1742 bool changed = false;
1743 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
1744 NULL);
1746 /* We handle type-punning through unions by value-numbering based
1747 on offset and size of the access. Be prepared to handle a
1748 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1749 if (result
1750 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1752 /* We will be setting the value number of lhs to the value number
1753 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1754 So first simplify and lookup this expression to see if it
1755 is already available. */
1756 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1757 if ((CONVERT_EXPR_P (val)
1758 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1759 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1761 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1762 if ((CONVERT_EXPR_P (tem)
1763 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1764 && (tem = fold_unary (TREE_CODE (val), TREE_TYPE (val), tem)))
1765 val = tem;
1767 result = val;
1768 if (!is_gimple_min_invariant (val)
1769 && TREE_CODE (val) != SSA_NAME)
1770 result = vn_nary_op_lookup (val, NULL);
1771 /* If the expression is not yet available, value-number lhs to
1772 a new SSA_NAME we create. */
1773 if (!result && may_insert)
1775 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1776 /* Initialize value-number information properly. */
1777 VN_INFO_GET (result)->valnum = result;
1778 VN_INFO (result)->value_id = get_next_value_id ();
1779 VN_INFO (result)->expr = val;
1780 VN_INFO (result)->has_constants = expr_has_constants (val);
1781 VN_INFO (result)->needs_insertion = true;
1782 /* As all "inserted" statements are singleton SCCs, insert
1783 to the valid table. This is strictly needed to
1784 avoid re-generating new value SSA_NAMEs for the same
1785 expression during SCC iteration over and over (the
1786 optimistic table gets cleared after each iteration).
1787 We do not need to insert into the optimistic table, as
1788 lookups there will fall back to the valid table. */
1789 if (current_info == optimistic_info)
1791 current_info = valid_info;
1792 vn_nary_op_insert (val, result);
1793 current_info = optimistic_info;
1795 else
1796 vn_nary_op_insert (val, result);
1797 if (dump_file && (dump_flags & TDF_DETAILS))
1799 fprintf (dump_file, "Inserting name ");
1800 print_generic_expr (dump_file, result, 0);
1801 fprintf (dump_file, " for expression ");
1802 print_generic_expr (dump_file, val, 0);
1803 fprintf (dump_file, "\n");
1808 if (result)
1810 changed = set_ssa_val_to (lhs, result);
1811 if (TREE_CODE (result) == SSA_NAME
1812 && VN_INFO (result)->has_constants)
1814 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1815 VN_INFO (lhs)->has_constants = true;
1818 else
1820 changed = set_ssa_val_to (lhs, lhs);
1821 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1824 return changed;
1828 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1829 and return true if the value number of the LHS has changed as a result. */
1831 static bool
1832 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1834 bool changed = false;
1835 tree result;
1836 bool resultsame = false;
1838 /* First we want to lookup using the *vuses* from the store and see
1839 if there the last store to this location with the same address
1840 had the same value.
1842 The vuses represent the memory state before the store. If the
1843 memory state, address, and value of the store is the same as the
1844 last store to this location, then this store will produce the
1845 same memory state as that store.
1847 In this case the vdef versions for this store are value numbered to those
1848 vuse versions, since they represent the same memory state after
1849 this store.
1851 Otherwise, the vdefs for the store are used when inserting into
1852 the table, since the store generates a new memory state. */
1854 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
1855 NULL);
1857 if (result)
1859 if (TREE_CODE (result) == SSA_NAME)
1860 result = SSA_VAL (result);
1861 if (TREE_CODE (op) == SSA_NAME)
1862 op = SSA_VAL (op);
1863 resultsame = expressions_equal_p (result, op);
1866 if (!result || !resultsame)
1868 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1869 int i;
1870 tree vdef;
1872 if (dump_file && (dump_flags & TDF_DETAILS))
1874 fprintf (dump_file, "No store match\n");
1875 fprintf (dump_file, "Value numbering store ");
1876 print_generic_expr (dump_file, lhs, 0);
1877 fprintf (dump_file, " to ");
1878 print_generic_expr (dump_file, op, 0);
1879 fprintf (dump_file, "\n");
1881 /* Have to set value numbers before insert, since insert is
1882 going to valueize the references in-place. */
1883 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1885 VN_INFO (vdef)->use_processed = true;
1886 changed |= set_ssa_val_to (vdef, vdef);
1889 /* Do not insert structure copies into the tables. */
1890 if (is_gimple_min_invariant (op)
1891 || is_gimple_reg (op))
1892 vn_reference_insert (lhs, op, vdefs);
1894 else
1896 /* We had a match, so value number the vdefs to have the value
1897 number of the vuses they came from. */
1898 ssa_op_iter op_iter;
1899 def_operand_p var;
1900 vuse_vec_p vv;
1902 if (dump_file && (dump_flags & TDF_DETAILS))
1903 fprintf (dump_file, "Store matched earlier value,"
1904 "value numbering store vdefs to matching vuses.\n");
1906 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1908 tree def = DEF_FROM_PTR (var);
1909 tree use;
1911 /* Uh, if the vuse is a multiuse, we can't really do much
1912 here, sadly, since we don't know which value number of
1913 which vuse to use. */
1914 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1915 use = def;
1916 else
1917 use = VUSE_ELEMENT_VAR (*vv, 0);
1919 VN_INFO (def)->use_processed = true;
1920 changed |= set_ssa_val_to (def, SSA_VAL (use));
1924 return changed;
1927 /* Visit and value number PHI, return true if the value number
1928 changed. */
1930 static bool
1931 visit_phi (gimple phi)
1933 bool changed = false;
1934 tree result;
1935 tree sameval = VN_TOP;
1936 bool allsame = true;
1937 unsigned i;
1939 /* TODO: We could check for this in init_sccvn, and replace this
1940 with a gcc_assert. */
1941 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1942 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1944 /* See if all non-TOP arguments have the same value. TOP is
1945 equivalent to everything, so we can ignore it. */
1946 for (i = 0; i < gimple_phi_num_args (phi); i++)
1948 tree def = PHI_ARG_DEF (phi, i);
1950 if (TREE_CODE (def) == SSA_NAME)
1951 def = SSA_VAL (def);
1952 if (def == VN_TOP)
1953 continue;
1954 if (sameval == VN_TOP)
1956 sameval = def;
1958 else
1960 if (!expressions_equal_p (def, sameval))
1962 allsame = false;
1963 break;
1968 /* If all value numbered to the same value, the phi node has that
1969 value. */
1970 if (allsame)
1972 if (is_gimple_min_invariant (sameval))
1974 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1975 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1977 else
1979 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1980 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1983 if (TREE_CODE (sameval) == SSA_NAME)
1984 return visit_copy (PHI_RESULT (phi), sameval);
1986 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1989 /* Otherwise, see if it is equivalent to a phi node in this block. */
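/* E.g. two hypothetical phis in the same block,
     a_2 = PHI <x_1, y_1>
     b_2 = PHI <x_1, y_1>
   hash to the same entry, so the lookup for the second one returns
   a_2 and b_2 is value numbered as a copy of a_2.  */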
1990 result = vn_phi_lookup (phi);
1991 if (result)
1993 if (TREE_CODE (result) == SSA_NAME)
1994 changed = visit_copy (PHI_RESULT (phi), result);
1995 else
1996 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1998 else
2000 vn_phi_insert (phi, PHI_RESULT (phi));
2001 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2002 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2003 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2006 return changed;
2009 /* Return true if EXPR contains constants. */
2011 static bool
2012 expr_has_constants (tree expr)
2014 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2016 case tcc_unary:
2017 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2019 case tcc_binary:
2020 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2021 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2022 /* Constants inside reference ops are rarely interesting, but
2023 it can take a lot of looking to find them. */
2024 case tcc_reference:
2025 case tcc_declaration:
2026 return false;
2027 default:
2028 return is_gimple_min_invariant (expr);
2030 return false;
2033 /* Return true if STMT contains constants. */
2035 static bool
2036 stmt_has_constants (gimple stmt)
2038 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2039 return false;
2041 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2043 case GIMPLE_UNARY_RHS:
2044 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2046 case GIMPLE_BINARY_RHS:
2047 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2048 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2049 case GIMPLE_SINGLE_RHS:
2050 /* Constants inside reference ops are rarely interesting, but
2051 it can take a lot of looking to find them. */
2052 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2053 default:
2054 gcc_unreachable ();
2056 return false;
2059 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2060 result.
2061 This is performed in place. */
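/* For example, if SSA_VAL (x_1) is the constant 5, a hypothetical
   expression x_1 + y_2 is rewritten in place to 5 + y_2; operands
   still at VN_TOP are left untouched.  */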
2063 static tree
2064 valueize_expr (tree expr)
2066 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2068 case tcc_unary:
2069 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2070 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2071 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2072 break;
2073 case tcc_binary:
2074 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2075 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2076 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2077 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2078 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2079 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2080 break;
2081 default:
2082 break;
2084 return expr;
2087 /* Simplify the binary expression on the RHS of STMT, and return the
2088 result if simplified. */
2090 static tree
2091 simplify_binary_expression (gimple stmt)
2093 tree result = NULL_TREE;
2094 tree op0 = gimple_assign_rhs1 (stmt);
2095 tree op1 = gimple_assign_rhs2 (stmt);
2097 /* This will not catch every single case we could combine, but will
2098 catch those with constants. The goal here is to combine
2099 constants between expressions while avoiding infinite expansion
2100 of expressions during simplification. */
2101 if (TREE_CODE (op0) == SSA_NAME)
2103 if (VN_INFO (op0)->has_constants
2104 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2105 op0 = valueize_expr (vn_get_expr_for (op0));
2106 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2107 op0 = SSA_VAL (op0);
2110 if (TREE_CODE (op1) == SSA_NAME)
2112 if (VN_INFO (op1)->has_constants)
2113 op1 = valueize_expr (vn_get_expr_for (op1));
2114 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2115 op1 = SSA_VAL (op1);
2118 /* Avoid folding if nothing changed. */
2119 if (op0 == gimple_assign_rhs1 (stmt)
2120 && op1 == gimple_assign_rhs2 (stmt))
2121 return NULL_TREE;
2123 fold_defer_overflow_warnings ();
2125 result = fold_binary (gimple_assign_rhs_code (stmt),
2126 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2127 if (result)
2128 STRIP_USELESS_TYPE_CONVERSION (result);
2130 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2131 stmt, 0);
2133 /* Make sure result is not a complex expression consisting
2134 of operators of operators (i.e. (a + b) + (a + c)).
2135 Otherwise, we will end up with unbounded expressions if
2136 fold does anything at all. */
2137 if (result && valid_gimple_rhs_p (result))
2138 return result;
2140 return NULL_TREE;
2143 /* Simplify the unary expression on the RHS of STMT, and return the
2144 result if simplified. */
2146 static tree
2147 simplify_unary_expression (gimple stmt)
2149 tree result = NULL_TREE;
2150 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2152 /* We handle some tcc_reference codes here that are all
2153 GIMPLE_ASSIGN_SINGLE codes. */
2154 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2155 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2156 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2157 op0 = TREE_OPERAND (op0, 0);
2159 if (TREE_CODE (op0) != SSA_NAME)
2160 return NULL_TREE;
2162 orig_op0 = op0;
2163 if (VN_INFO (op0)->has_constants)
2164 op0 = valueize_expr (vn_get_expr_for (op0));
2165 else if (gimple_assign_cast_p (stmt)
2166 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2167 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2168 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2170 /* We want to do tree-combining on conversion-like expressions.
2171 Make sure we feed only SSA_NAMEs or constants to fold though. */
2172 tree tem = valueize_expr (vn_get_expr_for (op0));
2173 if (UNARY_CLASS_P (tem)
2174 || BINARY_CLASS_P (tem)
2175 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2176 || TREE_CODE (tem) == SSA_NAME
2177 || is_gimple_min_invariant (tem))
2178 op0 = tem;
2181 /* Avoid folding if nothing changed, but remember the expression. */
2182 if (op0 == orig_op0)
2183 return NULL_TREE;
2185 result = fold_unary (gimple_assign_rhs_code (stmt),
2186 gimple_expr_type (stmt), op0);
2187 if (result)
2189 STRIP_USELESS_TYPE_CONVERSION (result);
2190 if (valid_gimple_rhs_p (result))
2191 return result;
2194 return NULL_TREE;
2197 /* Try to simplify RHS using equivalences and constant folding. */
2199 static tree
2200 try_to_simplify (gimple stmt)
2202 tree tem;
2204 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2205 in this case; there is no point in doing extra work. */
2206 if (gimple_assign_copy_p (stmt)
2207 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2208 return NULL_TREE;
2210 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2212 case tcc_declaration:
2213 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2214 if (tem)
2215 return tem;
2216 break;
2218 case tcc_reference:
2219 /* Do not do full-blown reference lookup here, but simplify
2220 reads from constant aggregates. */
2221 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2222 if (tem)
2223 return tem;
2225 /* Fallthrough for some codes that can operate on registers. */
2226 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2227 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2228 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2229 break;
2230 /* We could do a little more with unary ops, if they expand
2231 into binary ops, but it's debatable whether it is worth it. */
2232 case tcc_unary:
2233 return simplify_unary_expression (stmt);
2235 case tcc_comparison:
2236 case tcc_binary:
2237 return simplify_binary_expression (stmt);
2239 default:
2240 break;
2243 return NULL_TREE;
2246 /* Visit and value number USE, return true if the value number
2247 changed. */
2249 static bool
2250 visit_use (tree use)
2252 bool changed = false;
2253 gimple stmt = SSA_NAME_DEF_STMT (use);
2255 VN_INFO (use)->use_processed = true;
2257 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2258 if (dump_file && (dump_flags & TDF_DETAILS)
2259 && !SSA_NAME_IS_DEFAULT_DEF (use))
2261 fprintf (dump_file, "Value numbering ");
2262 print_generic_expr (dump_file, use, 0);
2263 fprintf (dump_file, " stmt = ");
2264 print_gimple_stmt (dump_file, stmt, 0, 0);
2267 /* Handle uninitialized uses. */
2268 if (SSA_NAME_IS_DEFAULT_DEF (use))
2269 changed = set_ssa_val_to (use, use);
2270 else
2272 if (gimple_code (stmt) == GIMPLE_PHI)
2273 changed = visit_phi (stmt);
2274 else if (!gimple_has_lhs (stmt)
2275 || gimple_has_volatile_ops (stmt)
2276 || stmt_could_throw_p (stmt))
2277 changed = defs_to_varying (stmt);
2278 else if (is_gimple_assign (stmt))
2280 tree lhs = gimple_assign_lhs (stmt);
2281 tree simplified;
2283 /* Shortcut for copies. Simplifying copies is pointless,
2284 since we copy the expression and value they represent. */
2285 if (gimple_assign_copy_p (stmt)
2286 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2287 && TREE_CODE (lhs) == SSA_NAME)
2289 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2290 goto done;
2292 simplified = try_to_simplify (stmt);
2293 if (simplified)
2295 if (dump_file && (dump_flags & TDF_DETAILS))
2297 fprintf (dump_file, "RHS ");
2298 print_gimple_expr (dump_file, stmt, 0, 0);
2299 fprintf (dump_file, " simplified to ");
2300 print_generic_expr (dump_file, simplified, 0);
2301 if (TREE_CODE (lhs) == SSA_NAME)
2302 fprintf (dump_file, " has constants %d\n",
2303 expr_has_constants (simplified));
2304 else
2305 fprintf (dump_file, "\n");
2308 /* Setting value numbers to constants will occasionally
2309 screw up phi congruence because constants are not
2310 uniquely associated with a single ssa name that can be
2311 looked up. */
2312 if (simplified
2313 && is_gimple_min_invariant (simplified)
2314 && TREE_CODE (lhs) == SSA_NAME)
2316 VN_INFO (lhs)->expr = simplified;
2317 VN_INFO (lhs)->has_constants = true;
2318 changed = set_ssa_val_to (lhs, simplified);
2319 goto done;
2321 else if (simplified
2322 && TREE_CODE (simplified) == SSA_NAME
2323 && TREE_CODE (lhs) == SSA_NAME)
2325 changed = visit_copy (lhs, simplified);
2326 goto done;
2328 else if (simplified)
2330 if (TREE_CODE (lhs) == SSA_NAME)
2332 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2333 /* We have to unshare the expression or else
2334 valueizing may change the IL stream. */
2335 VN_INFO (lhs)->expr = unshare_expr (simplified);
2338 else if (stmt_has_constants (stmt)
2339 && TREE_CODE (lhs) == SSA_NAME)
2340 VN_INFO (lhs)->has_constants = true;
2341 else if (TREE_CODE (lhs) == SSA_NAME)
2343 /* We reset expr and constantness here because we may
2344 have been value numbering optimistically, and
2345 iterating. They may become non-constant in this case,
2346 even if they were optimistically constant. */
2348 VN_INFO (lhs)->has_constants = false;
2349 VN_INFO (lhs)->expr = NULL_TREE;
2352 if (TREE_CODE (lhs) == SSA_NAME
2353 /* We can substitute SSA_NAMEs that are live over
2354 abnormal edges with their constant value. */
2355 && !(gimple_assign_copy_p (stmt)
2356 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2357 && !(simplified
2358 && is_gimple_min_invariant (simplified))
2359 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2360 changed = defs_to_varying (stmt);
2361 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2363 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2365 else if (TREE_CODE (lhs) == SSA_NAME)
2367 if ((gimple_assign_copy_p (stmt)
2368 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2369 || (simplified
2370 && is_gimple_min_invariant (simplified)))
2372 VN_INFO (lhs)->has_constants = true;
2373 if (simplified)
2374 changed = set_ssa_val_to (lhs, simplified);
2375 else
2376 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2378 else
2380 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2382 case GIMPLE_UNARY_RHS:
2383 changed = visit_unary_op (lhs, stmt);
2384 break;
2385 case GIMPLE_BINARY_RHS:
2386 changed = visit_binary_op (lhs, stmt);
2387 break;
2388 case GIMPLE_SINGLE_RHS:
2389 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2391 case tcc_reference:
2392 /* VOP-less references can go through the unary case. */
2393 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2394 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2395 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2396 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2398 changed = visit_unary_op (lhs, stmt);
2399 break;
2401 /* Fallthrough. */
2402 case tcc_declaration:
2403 changed = visit_reference_op_load
2404 (lhs, gimple_assign_rhs1 (stmt), stmt);
2405 break;
2406 case tcc_expression:
2407 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2409 changed = visit_unary_op (lhs, stmt);
2410 break;
2412 /* Fallthrough. */
2413 default:
2414 changed = defs_to_varying (stmt);
2416 break;
2417 default:
2418 changed = defs_to_varying (stmt);
2419 break;
2423 else
2424 changed = defs_to_varying (stmt);
2426 else if (is_gimple_call (stmt))
2428 tree lhs = gimple_call_lhs (stmt);
2430 /* ??? We could try to simplify calls. */
2432 if (stmt_has_constants (stmt)
2433 && TREE_CODE (lhs) == SSA_NAME)
2434 VN_INFO (lhs)->has_constants = true;
2435 else if (TREE_CODE (lhs) == SSA_NAME)
2437 /* We reset expr and constantness here because we may
2438 have been value numbering optimistically, and
2439 iterating. They may become non-constant in this case,
2440 even if they were optimistically constant. */
2441 VN_INFO (lhs)->has_constants = false;
2442 VN_INFO (lhs)->expr = NULL_TREE;
2445 if (TREE_CODE (lhs) == SSA_NAME
2446 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2447 changed = defs_to_varying (stmt);
2448 /* ??? We should handle stores from calls. */
2449 else if (TREE_CODE (lhs) == SSA_NAME)
2451 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2452 changed = visit_reference_op_call (lhs, stmt);
2453 else
2454 changed = defs_to_varying (stmt);
2456 else
2457 changed = defs_to_varying (stmt);
2460 done:
2461 return changed;
2464 /* Compare two operands by reverse postorder index. */
2466 static int
2467 compare_ops (const void *pa, const void *pb)
2469 const tree opa = *((const tree *)pa);
2470 const tree opb = *((const tree *)pb);
2471 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2472 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2473 basic_block bba;
2474 basic_block bbb;
2476 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2477 return 0;
2478 else if (gimple_nop_p (opstmta))
2479 return -1;
2480 else if (gimple_nop_p (opstmtb))
2481 return 1;
2483 bba = gimple_bb (opstmta);
2484 bbb = gimple_bb (opstmtb);
2486 if (!bba && !bbb)
2487 return 0;
2488 else if (!bba)
2489 return -1;
2490 else if (!bbb)
2491 return 1;
2493 if (bba == bbb)
2495 if (gimple_code (opstmta) == GIMPLE_PHI
2496 && gimple_code (opstmtb) == GIMPLE_PHI)
2497 return 0;
2498 else if (gimple_code (opstmta) == GIMPLE_PHI)
2499 return -1;
2500 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2501 return 1;
2502 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2504 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2507 /* Sort an array containing members of a strongly connected component
2508 SCC so that the members are ordered by RPO number.
2509 This means that when the sort is complete, iterating through the
2510 array will give you the members in RPO order. */
2512 static void
2513 sort_scc (VEC (tree, heap) *scc)
2515 qsort (VEC_address (tree, scc),
2516 VEC_length (tree, scc),
2517 sizeof (tree),
2518 compare_ops);
2521 /* Process a strongly connected component in the SSA graph. */
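/* As a hypothetical example, for the cycle
     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0
   the first optimistic iteration value numbers i_1 to 0 and then
   i_2 to 0; the second iteration changes nothing, so the SCC has
   converged and is visited once more with the valid table.  */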
2523 static void
2524 process_scc (VEC (tree, heap) *scc)
2526 /* If the SCC has a single member, just visit it. */
2528 if (VEC_length (tree, scc) == 1)
2530 tree use = VEC_index (tree, scc, 0);
2531 if (!VN_INFO (use)->use_processed)
2532 visit_use (use);
2534 else
2536 tree var;
2537 unsigned int i;
2538 unsigned int iterations = 0;
2539 bool changed = true;
2541 /* Iterate over the SCC with the optimistic table until it stops
2542 changing. */
2543 current_info = optimistic_info;
2544 while (changed)
2546 changed = false;
2547 iterations++;
2548 /* As we are value-numbering optimistically we have to
2549 clear the expression tables and the simplified expressions
2550 in each iteration until we converge. */
2551 htab_empty (optimistic_info->nary);
2552 htab_empty (optimistic_info->phis);
2553 htab_empty (optimistic_info->references);
2554 obstack_free (&optimistic_info->nary_obstack, NULL);
2555 gcc_obstack_init (&optimistic_info->nary_obstack);
2556 empty_alloc_pool (optimistic_info->phis_pool);
2557 empty_alloc_pool (optimistic_info->references_pool);
2558 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2559 VN_INFO (var)->expr = NULL_TREE;
2560 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2561 changed |= visit_use (var);
2564 statistics_histogram_event (cfun, "SCC iterations", iterations);
2566 /* Finally, visit the SCC once using the valid table. */
2567 current_info = valid_info;
2568 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2569 visit_use (var);
2573 DEF_VEC_O(ssa_op_iter);
2574 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2576 /* Pop the components of the found SCC for NAME off the SCC stack
2577 and process them. Returns true if all went well, false if
2578 we run into resource limits. */
2580 static bool
2581 extract_and_process_scc_for_name (tree name)
2583 VEC (tree, heap) *scc = NULL;
2584 tree x;
2586 /* Found an SCC, pop the components off the SCC stack and
2587 process them. */
2590 x = VEC_pop (tree, sccstack);
2592 VN_INFO (x)->on_sccstack = false;
2593 VEC_safe_push (tree, heap, scc, x);
2594 } while (x != name);
2596 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2597 if (VEC_length (tree, scc)
2598 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2600 if (dump_file)
2601 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2602 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2603 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2604 return false;
2607 if (VEC_length (tree, scc) > 1)
2608 sort_scc (scc);
2610 if (dump_file && (dump_flags & TDF_DETAILS))
2611 print_scc (dump_file, scc);
2613 process_scc (scc);
2615 VEC_free (tree, heap, scc);
2617 return true;
2620 /* Depth first search on NAME to discover and process SCCs in the SSA
2621 graph.
2622 Execution of this algorithm relies on the fact that the SCCs are
2623 popped off the stack in topological order.
2624 Returns true if successful, false if we stopped processing SCCs due
2625 to resource constraints. */
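/* E.g. the hypothetical loop
     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 1
   forms the SSA cycle {i_1, i_2}: walking the operands of i_2
   reaches i_1, whose operands lead back to i_2, so both names
   receive the same low number and are popped together as one
   SCC.  */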
2627 static bool
2628 DFS (tree name)
2630 VEC(ssa_op_iter, heap) *itervec = NULL;
2631 VEC(tree, heap) *namevec = NULL;
2632 use_operand_p usep = NULL;
2633 gimple defstmt;
2634 tree use;
2635 ssa_op_iter iter;
2637 start_over:
2638 /* SCC info */
2639 VN_INFO (name)->dfsnum = next_dfs_num++;
2640 VN_INFO (name)->visited = true;
2641 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2643 VEC_safe_push (tree, heap, sccstack, name);
2644 VN_INFO (name)->on_sccstack = true;
2645 defstmt = SSA_NAME_DEF_STMT (name);
2647 /* Recursively DFS on our operands, looking for SCCs. */
2648 if (!gimple_nop_p (defstmt))
2650 /* Push a new iterator. */
2651 if (gimple_code (defstmt) == GIMPLE_PHI)
2652 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2653 else
2654 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2656 else
2657 clear_and_done_ssa_iter (&iter);
2659 while (1)
2661 /* If we are done processing uses of a name, go up the stack
2662 of iterators and process SCCs as we found them. */
2663 if (op_iter_done (&iter))
2665 /* See if we found an SCC. */
2666 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2667 if (!extract_and_process_scc_for_name (name))
2669 VEC_free (tree, heap, namevec);
2670 VEC_free (ssa_op_iter, heap, itervec);
2671 return false;
2674 /* Check if we are done. */
2675 if (VEC_empty (tree, namevec))
2677 VEC_free (tree, heap, namevec);
2678 VEC_free (ssa_op_iter, heap, itervec);
2679 return true;
2682 /* Restore the last use walker and continue walking there. */
2683 use = name;
2684 name = VEC_pop (tree, namevec);
2685 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2686 sizeof (ssa_op_iter));
2687 VEC_pop (ssa_op_iter, itervec);
2688 goto continue_walking;
2691 use = USE_FROM_PTR (usep);
2693 /* Since we handle phi nodes, we will sometimes get
2694 invariants in the use expression. */
2695 if (TREE_CODE (use) == SSA_NAME)
2697 if (! (VN_INFO (use)->visited))
2699 /* Recurse by pushing the current use walking state on
2700 the stack and starting over. */
2701 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2702 VEC_safe_push(tree, heap, namevec, name);
2703 name = use;
2704 goto start_over;
2706 continue_walking:
2707 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2708 VN_INFO (use)->low);
2710 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2711 && VN_INFO (use)->on_sccstack)
2713 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2714 VN_INFO (name)->low);
2718 usep = op_iter_next_use (&iter);
2722 /* Allocate a value number table. */
2724 static void
2725 allocate_vn_table (vn_tables_t table)
2727 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2728 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2729 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2730 free_reference);
2732 gcc_obstack_init (&table->nary_obstack);
2733 table->phis_pool = create_alloc_pool ("VN phis",
2734 sizeof (struct vn_phi_s),
2735 30);
2736 table->references_pool = create_alloc_pool ("VN references",
2737 sizeof (struct vn_reference_s),
2738 30);
2741 /* Free a value number table. */
2743 static void
2744 free_vn_table (vn_tables_t table)
2746 htab_delete (table->phis);
2747 htab_delete (table->nary);
2748 htab_delete (table->references);
2749 obstack_free (&table->nary_obstack, NULL);
2750 free_alloc_pool (table->phis_pool);
2751 free_alloc_pool (table->references_pool);
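/* Initialize the data structures used by SCC value numbering. */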
2754 static void
2755 init_scc_vn (void)
2757 size_t i;
2758 int j;
2759 int *rpo_numbers_temp;
2761 calculate_dominance_info (CDI_DOMINATORS);
2762 sccstack = NULL;
2763 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2764 free);
2766 constant_value_ids = BITMAP_ALLOC (NULL);
2768 next_dfs_num = 1;
2769 next_value_id = 1;
2771 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2772 /* VEC_alloc doesn't actually grow it to the right size; it just
2773 preallocates the space to do so. */
2774 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2775 gcc_obstack_init (&vn_ssa_aux_obstack);
2777 shared_lookup_phiargs = NULL;
2778 shared_lookup_vops = NULL;
2779 shared_lookup_references = NULL;
2780 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2781 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2782 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2784 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb->index
2785 means that the i'th block in RPO order is bb. We want to map each
2786 bb to its RPO number, so we invert this array. */
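/* E.g. a hypothetical rpo_numbers_temp of {2, 5, 3} yields
   rpo_numbers[2] == 0, rpo_numbers[5] == 1 and rpo_numbers[3] == 2.  */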
2787 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2788 rpo_numbers[rpo_numbers_temp[j]] = j;
2790 XDELETE (rpo_numbers_temp);
2792 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2794 /* Create the VN_INFO structures, and initialize value numbers to
2795 TOP. */
2796 for (i = 0; i < num_ssa_names; i++)
2798 tree name = ssa_name (i);
2799 if (name)
2801 VN_INFO_GET (name)->valnum = VN_TOP;
2802 VN_INFO (name)->expr = NULL_TREE;
2803 VN_INFO (name)->value_id = 0;
2807 renumber_gimple_stmt_uids ();
2809 /* Create the valid and optimistic value numbering tables. */
2810 valid_info = XCNEW (struct vn_tables_s);
2811 allocate_vn_table (valid_info);
2812 optimistic_info = XCNEW (struct vn_tables_s);
2813 allocate_vn_table (optimistic_info);
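/* Free the data structures allocated by init_scc_vn. */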
2816 void
2817 free_scc_vn (void)
2819 size_t i;
2821 htab_delete (constant_to_value_id);
2822 BITMAP_FREE (constant_value_ids);
2823 VEC_free (tree, heap, shared_lookup_phiargs);
2824 VEC_free (tree, gc, shared_lookup_vops);
2825 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2826 XDELETEVEC (rpo_numbers);
2828 for (i = 0; i < num_ssa_names; i++)
2830 tree name = ssa_name (i);
2831 if (name
2832 && VN_INFO (name)->needs_insertion)
2833 release_ssa_name (name);
2835 obstack_free (&vn_ssa_aux_obstack, NULL);
2836 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2838 VEC_free (tree, heap, sccstack);
2839 free_vn_table (valid_info);
2840 XDELETE (valid_info);
2841 free_vn_table (optimistic_info);
2842 XDELETE (optimistic_info);
2845 /* Set the value ids in the valid hash tables. */
2847 static void
2848 set_hashtable_value_ids (void)
2850 htab_iterator hi;
2851 vn_nary_op_t vno;
2852 vn_reference_t vr;
2853 vn_phi_t vp;
2855 /* Now set the value ids of the things we had put in the hash
2856 table. */
2858 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2859 vno, vn_nary_op_t, hi)
2861 if (vno->result)
2863 if (TREE_CODE (vno->result) == SSA_NAME)
2864 vno->value_id = VN_INFO (vno->result)->value_id;
2865 else if (is_gimple_min_invariant (vno->result))
2866 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2870 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2871 vp, vn_phi_t, hi)
2873 if (vp->result)
2875 if (TREE_CODE (vp->result) == SSA_NAME)
2876 vp->value_id = VN_INFO (vp->result)->value_id;
2877 else if (is_gimple_min_invariant (vp->result))
2878 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2882 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2883 vr, vn_reference_t, hi)
2885 if (vr->result)
2887 if (TREE_CODE (vr->result) == SSA_NAME)
2888 vr->value_id = VN_INFO (vr->result)->value_id;
2889 else if (is_gimple_min_invariant (vr->result))
2890 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2895 /* Do SCCVN. Returns true if it finished, false if we bailed out
2896 due to resource constraints. */
2898 bool
2899 run_scc_vn (bool may_insert_arg)
2901 size_t i;
2902 tree param;
2903 bool changed = true;
2905 may_insert = may_insert_arg;
2907 init_scc_vn ();
2908 current_info = valid_info;
2910 for (param = DECL_ARGUMENTS (current_function_decl);
2911 param;
2912 param = TREE_CHAIN (param))
2914 if (gimple_default_def (cfun, param) != NULL)
2916 tree def = gimple_default_def (cfun, param);
2917 SSA_VAL (def) = def;
2921 for (i = 1; i < num_ssa_names; ++i)
2923 tree name = ssa_name (i);
2924 if (name
2925 && VN_INFO (name)->visited == false
2926 && !has_zero_uses (name))
2927 if (!DFS (name))
2929 free_scc_vn ();
2930 may_insert = false;
2931 return false;
2935 /* Initialize the value ids. */
2937 for (i = 1; i < num_ssa_names; ++i)
2939 tree name = ssa_name (i);
2940 vn_ssa_aux_t info;
2941 if (!name)
2942 continue;
2943 info = VN_INFO (name);
2944 if (info->valnum == name)
2945 info->value_id = get_next_value_id ();
2946 else if (is_gimple_min_invariant (info->valnum))
2947 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2950 /* Propagate until they stop changing. */
2951 while (changed)
2953 changed = false;
2954 for (i = 1; i < num_ssa_names; ++i)
2956 tree name = ssa_name (i);
2957 vn_ssa_aux_t info;
2958 if (!name)
2959 continue;
2960 info = VN_INFO (name);
2961 if (TREE_CODE (info->valnum) == SSA_NAME
2962 && info->valnum != name
2963 && info->value_id != VN_INFO (info->valnum)->value_id)
2965 changed = true;
2966 info->value_id = VN_INFO (info->valnum)->value_id;
2971 set_hashtable_value_ids ();
2973 if (dump_file && (dump_flags & TDF_DETAILS))
2975 fprintf (dump_file, "Value numbers:\n");
2976 for (i = 0; i < num_ssa_names; i++)
2978 tree name = ssa_name (i);
2979 if (name
2980 && VN_INFO (name)->visited
2981 && SSA_VAL (name) != name)
2983 print_generic_expr (dump_file, name, 0);
2984 fprintf (dump_file, " = ");
2985 print_generic_expr (dump_file, SSA_VAL (name), 0);
2986 fprintf (dump_file, "\n");
2991 may_insert = false;
2992 return true;
2995 /* Return the maximum value id we have ever seen. */
2997 unsigned int
2998 get_max_value_id (void)
3000 return next_value_id;
3003 /* Return the next unique value id. */
3005 unsigned int
3006 get_next_value_id (void)
3008 return next_value_id++;
3012 /* Compare two expressions E1 and E2 and return true if they are equal. */
3014 bool
3015 expressions_equal_p (tree e1, tree e2)
3017 /* The obvious case. */
3018 if (e1 == e2)
3019 return true;
3021 /* If only one of them is null, they cannot be equal. */
3022 if (!e1 || !e2)
3023 return false;
3025 /* Recurse on elements of lists. */
3026 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3028 tree lop1;
3029 tree lop2;
3030 for (lop1 = e1, lop2 = e2;
3031 lop1 || lop2;
3032 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3034 if (!lop1 || !lop2)
3035 return false;
3036 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3037 return false;
3039 return true;
3042 /* Now perform the actual comparison. */
3043 if (TREE_CODE (e1) == TREE_CODE (e2)
3044 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3045 return true;
3047 return false;
3050 /* Sort the VUSE array so that we can do equality comparisons
3051 quicker on two vuse vecs. */
3053 void
3054 sort_vuses (VEC (tree,gc) *vuses)
3056 if (VEC_length (tree, vuses) > 1)
3057 qsort (VEC_address (tree, vuses),
3058 VEC_length (tree, vuses),
3059 sizeof (tree),
3060 operand_build_cmp);
3063 /* Likewise, but for a heap-allocated VUSE array. */
3066 void
3067 sort_vuses_heap (VEC (tree,heap) *vuses)
3069 if (VEC_length (tree, vuses) > 1)
3070 qsort (VEC_address (tree, vuses),
3071 VEC_length (tree, vuses),
3072 sizeof (tree),
3073 operand_build_cmp);
3077 /* Return true if the nary operation NARY may trap. This is a copy
3078 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
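/* For instance, a hypothetical TRUNC_DIV_EXPR whose divisor is not
   known to be nonzero may trap at run time, as may a PLUS_EXPR on
   an integral type with TYPE_OVERFLOW_TRAPS, so neither may be
   speculated.  */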
3080 bool
3081 vn_nary_may_trap (vn_nary_op_t nary)
3083 tree type;
3084 tree rhs2;
3085 bool honor_nans = false;
3086 bool honor_snans = false;
3087 bool fp_operation = false;
3088 bool honor_trapv = false;
3089 bool handled, ret;
3090 unsigned i;
3092 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3093 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3094 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3096 type = nary->type;
3097 fp_operation = FLOAT_TYPE_P (type);
3098 if (fp_operation)
3100 honor_nans = flag_trapping_math && !flag_finite_math_only;
3101 honor_snans = flag_signaling_nans != 0;
3103 else if (INTEGRAL_TYPE_P (type)
3104 && TYPE_OVERFLOW_TRAPS (type))
3105 honor_trapv = true;
3107 rhs2 = nary->op[1];
3108 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3109 honor_trapv,
3110 honor_nans, honor_snans, rhs2,
3111 &handled);
3112 if (handled
3113 && ret)
3114 return true;
3116 for (i = 0; i < nary->length; ++i)
3117 if (tree_could_trap_p (nary->op[i]))
3118 return true;
3120 return false;