2008-07-06 Kai Tietz <kai.tietz@onevision.com>
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob99cc08d844518867cdc77726ee19e7d1f85e610f
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
/* The set of hashtables and alloc_pool's for their items.  A single
   instance groups all per-iteration value-numbering state so the
   optimistic and valid table sets can be swapped wholesale.  */

typedef struct vn_tables_s
{
  htab_t nary;				/* N-ary operation -> value.  */
  htab_t phis;				/* PHI node -> value.  */
  htab_t references;			/* Memory reference -> value.  */
  struct obstack nary_obstack;		/* Storage for nary entries.  */
  alloc_pool phis_pool;			/* Storage for phi entries.  */
  alloc_pool references_pool;		/* Storage for reference entries.  */
} *vn_tables_t;
/* Nary operations in the hashtable consist of length operands, an
   opcode, and a type.  Result is the value number of the operation,
   and hashcode is stored to avoid having to calculate it
   repeatedly.  */

typedef struct vn_nary_op_s
{
  ENUM_BITFIELD(tree_code) opcode : 16;
  unsigned length : 16;		/* Number of entries of OP in use.  */
  hashval_t hashcode;		/* Cached hash of the operation.  */
  tree result;			/* Value number of the operation.  */
  tree type;
  tree op[4];			/* Up to 4 operands; entries past LENGTH
				   are not allocated when the struct is
				   obstack-allocated (see
				   vn_nary_op_insert).  */
} *vn_nary_op_t;
typedef const struct vn_nary_op_s *const_vn_nary_op_t;
/* Phi nodes in the hashtable consist of their non-VN_TOP phi
   arguments, and the basic block the phi is in.  Result is the value
   number of the operation, and hashcode is stored to avoid having to
   calculate it repeatedly.  Phi nodes not in the same block are never
   considered equivalent.  */

typedef struct vn_phi_s
{
  VEC (tree, heap) *phiargs;	/* Valueized phi arguments, in edge order.  */
  basic_block block;		/* Block containing the phi.  */
  hashval_t hashcode;		/* Cached hash.  */
  tree result;			/* Value number of the phi.  */
} *vn_phi_t;
typedef const struct vn_phi_s *const_vn_phi_t;
/* Reference operands only exist in reference operations structures.
   They consist of an opcode, type, and some number of operands.  For
   a given opcode, some, all, or none of the operands may be used.
   The operands are there to store the information that makes up the
   portion of the addressing calculation that opcode performs.  */

typedef struct vn_reference_op_struct
{
  enum tree_code opcode;
  tree type;
  tree op0;
  tree op1;
} vn_reference_op_s;
typedef vn_reference_op_s *vn_reference_op_t;
typedef const vn_reference_op_s *const_vn_reference_op_t;

/* Vector-of-struct support for vn_reference_op_s.  */
DEF_VEC_O(vn_reference_op_s);
DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
/* A reference operation in the hashtable is representation as a
   collection of vuses, representing the memory state at the time of
   the operation, and a collection of operands that make up the
   addressing calculation.  If two vn_reference_t's have the same set
   of operands, they access the same memory location.  We also store
   the resulting value number, and the hashcode.  The vuses are
   always stored in order sorted by ssa name version.  */

typedef struct vn_reference_s
{
  VEC (tree, gc) *vuses;			/* Memory state (sorted).  */
  VEC (vn_reference_op_s, heap) *operands;	/* Address computation.  */
  hashval_t hashcode;				/* Cached hash.  */
  tree result;					/* Value number.  */
} *vn_reference_t;
typedef const struct vn_reference_s *const_vn_reference_t;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* PRE hashtables storing information about mapping from expressions to
   value handles.  */

static vn_tables_t pre_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

/* True when expression insertion (FRE) is allowed; see the
   COMPONENT_REF handling in copy_reference_ops_from_ref.  */
static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
237 /* Return the value numbering information for a given SSA name. */
239 vn_ssa_aux_t
240 VN_INFO (tree name)
242 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
243 SSA_NAME_VERSION (name));
246 /* Set the value numbering info for a given SSA name to a given
247 value. */
249 static inline void
250 VN_INFO_SET (tree name, vn_ssa_aux_t value)
252 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name), value);
256 /* Initialize the value numbering info for a given SSA name.
257 This should be called just once for every SSA name. */
259 vn_ssa_aux_t
260 VN_INFO_GET (tree name)
262 vn_ssa_aux_t newinfo;
264 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
265 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
266 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
267 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
268 SSA_NAME_VERSION (name) + 1);
269 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
270 SSA_NAME_VERSION (name), newinfo);
271 return newinfo;
275 /* Free a phi operation structure VP. */
277 static void
278 free_phi (void *vp)
280 vn_phi_t phi = (vn_phi_t) vp;
281 VEC_free (tree, heap, phi->phiargs);
284 /* Free a reference operation structure VP. */
286 static void
287 free_reference (void *vp)
289 vn_reference_t vr = (vn_reference_t) vp;
290 VEC_free (vn_reference_op_s, heap, vr->operands);
293 /* Compare two reference operands P1 and P2 for equality. Return true if
294 they are equal, and false otherwise. */
296 static int
297 vn_reference_op_eq (const void *p1, const void *p2)
299 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
300 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
301 return vro1->opcode == vro2->opcode
302 && vro1->type == vro2->type
303 && expressions_equal_p (vro1->op0, vro2->op0)
304 && expressions_equal_p (vro1->op1, vro2->op1);
307 /* Compute the hash for a reference operand VRO1. */
309 static hashval_t
310 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
312 return iterative_hash_expr (vro1->op0, vro1->opcode)
313 + iterative_hash_expr (vro1->op1, vro1->opcode);
316 /* Return the hashcode for a given reference operation P1. */
318 static hashval_t
319 vn_reference_hash (const void *p1)
321 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
322 return vr1->hashcode;
325 /* Compute a hash for the reference operation VR1 and return it. */
327 static inline hashval_t
328 vn_reference_compute_hash (const vn_reference_t vr1)
330 hashval_t result = 0;
331 tree v;
332 int i;
333 vn_reference_op_t vro;
335 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
336 result += iterative_hash_expr (v, 0);
337 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
338 result += vn_reference_op_compute_hash (vro);
340 return result;
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

static int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Identical vectors imply identical references.  */
  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    {
      if (VEC_index (tree, vr2->vuses, i) != v)
	return false;
    }

  /* Finally compare the operands pairwise.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
			       vro))
	return false;
    }
  return true;
}
389 /* Place the vuses from STMT into *result. */
391 static inline void
392 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
394 ssa_op_iter iter;
395 tree vuse;
397 if (!stmt)
398 return;
400 VEC_reserve_exact (tree, gc, *result,
401 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
403 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
404 VEC_quick_push (tree, *result, vuse);
408 /* Copy the VUSE names in STMT into a vector, and return
409 the vector. */
411 VEC (tree, gc) *
412 copy_vuses_from_stmt (tree stmt)
414 VEC (tree, gc) *vuses = NULL;
416 vuses_to_vec (stmt, &vuses);
418 return vuses;
421 /* Place the vdefs from STMT into *result. */
423 static inline void
424 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
426 ssa_op_iter iter;
427 tree vdef;
429 if (!stmt)
430 return;
432 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
434 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
435 VEC_quick_push (tree, *result, vdef);
438 /* Copy the names of vdef results in STMT into a vector, and return
439 the vector. */
441 static VEC (tree, gc) *
442 copy_vdefs_from_stmt (tree stmt)
444 VEC (tree, gc) *vdefs = NULL;
446 vdefs_to_vec (stmt, &vdefs);
448 return vdefs;
/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  The returned vector is shared among all callers; do not
   free it and do not keep it across another call.  */

VEC (tree, gc) *
shared_vuses_from_stmt (tree stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  The reference tree is flattened outermost-first:
   each level of the access path becomes one (or for calls and
   TARGET_MEM_REF, two or more) vn_reference_op_s entries.  */

static void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  /* Calls are different from all other reference operations.  */
  if (TREE_CODE (ref) == CALL_EXPR)
    {
      vn_reference_op_s temp;
      tree callfn;
      call_expr_arg_iterator iter;
      tree callarg;

      /* Copy the call_expr opcode, type, function being called, and
	 arguments.  */
      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = CALL_EXPR;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Prefer the fndecl when known, otherwise record the callee
	 expression itself.  */
      callfn = get_callee_fndecl (ref);
      if (!callfn)
	callfn = CALL_EXPR_FN (ref);
      temp.type = TREE_TYPE (callfn);
      temp.opcode = TREE_CODE (callfn);
      temp.op0 = callfn;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* One entry per call argument.  */
      FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
	{
	  memset (&temp, 0, sizeof (temp));
	  temp.type = TREE_TYPE (callarg);
	  temp.opcode = TREE_CODE (callarg);
	  temp.op0 = callarg;
	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
	}
      return;
    }

  /* TARGET_MEM_REF carries more operands than a vn_reference_op_s can
     hold, so it is split into two entries.  */
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
	  else
	    /* Record field as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 3);
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VALUE_HANDLE:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  /* Leaf of the access path: record the node itself.  */
	  temp.op0 = ref;
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	case ADDR_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Walk inward to the base of the reference.  */
      if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
613 /* Create a vector of vn_reference_op_s structures from REF, a
614 REFERENCE_CLASS_P tree. The vector is not shared. */
616 static VEC(vn_reference_op_s, heap) *
617 create_reference_ops_from_ref (tree ref)
619 VEC (vn_reference_op_s, heap) *result = NULL;
621 copy_reference_ops_from_ref (ref, &result);
622 return result;
/* Scratch vector reused by shared_reference_ops_from_ref.  */
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  Each call overwrites the previous contents; do not
   free the result or hold it across another call.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}
642 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
643 structures into their value numbers. This is done in-place, and
644 the vector passed in is returned. */
646 static VEC (vn_reference_op_s, heap) *
647 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
649 vn_reference_op_t vro;
650 int i;
652 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
654 if (vro->opcode == SSA_NAME
655 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
656 vro->op0 = SSA_VAL (vro->op0);
659 return orig;
662 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
663 their value numbers. This is done in-place, and the vector passed
664 in is returned. */
666 static VEC (tree, gc) *
667 valueize_vuses (VEC (tree, gc) *orig)
669 bool made_replacement = false;
670 tree vuse;
671 int i;
673 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
675 if (vuse != SSA_VAL (vuse))
677 made_replacement = true;
678 VEC_replace (tree, orig, i, SSA_VAL (vuse));
682 if (made_replacement && VEC_length (tree, orig) > 1)
683 sort_vuses (orig);
685 return orig;
/* Return the single reference statement defining all virtual uses
   in VUSES or NULL_TREE, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static tree
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  tree def_stmt, vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (TREE_CODE (def_stmt) == PHI_NODE)
    {
      /* We can only handle lookups over PHI nodes for a single
	 virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
	{
	  def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
	  goto cont;
	}
      else
	return NULL_TREE;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      tree tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
	return NULL_TREE;
    }

  /* Now see if the definition aliases ref, and loop until it does.
     Each non-aliasing store is skipped by walking further up the
     virtual use-def chain.  */
cont:
  while (def_stmt
	 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
	 && !get_call_expr_in (def_stmt)
	 && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
734 /* Lookup a SCCVN reference operation VR in the current hash table.
735 Returns the resulting value number if it exists in the hash table,
736 NULL_TREE otherwise. */
738 static tree
739 vn_reference_lookup_1 (vn_reference_t vr)
741 void **slot;
742 hashval_t hash;
744 hash = vr->hashcode;
745 slot = htab_find_slot_with_hash (current_info->references, vr,
746 hash, NO_INSERT);
747 if (!slot && current_info == optimistic_info)
748 slot = htab_find_slot_with_hash (valid_info->references, vr,
749 hash, NO_INSERT);
750 if (slot)
751 return ((vn_reference_t)*slot)->result;
753 return NULL_TREE;
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  When MAYWALK is true and the
   direct lookup fails, walk the virtual use-def chain to a single
   aliasing definition and retry with that statement's vdefs.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result, def_stmt;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && !get_call_expr_in (op)
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
      /* If there is a call involved, op must be assumed to
	 be clobbered.  */
      && !get_call_expr_in (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
	 look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1);
    }

  return result;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  The entry is pool-allocated; on a hash collision with an
   existing store entry the old entry's operands are freed and
   replaced (see comment below for why that can happen).  */

void
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);

  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Store value numbers, not names, as results.  */
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
}
/* Compute and return the hash value for nary operation VNO1.  Note
   this canonicalizes VNO1 in place: SSA_NAME operands are replaced by
   their value numbers and commutative operands are put into canonical
   order, so equal operations hash (and compare) equal.  */

static inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  /* Valueize SSA_NAME operands first so canonical ordering below is
     done on value numbers.  */
  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  /* Canonical operand order for commutative binary operations.  */
  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
856 /* Return the computed hashcode for nary operation P1. */
858 static hashval_t
859 vn_nary_op_hash (const void *p1)
861 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
862 return vno1->hashcode;
865 /* Compare nary operations P1 and P2 and return true if they are
866 equivalent. */
868 static int
869 vn_nary_op_eq (const void *p1, const void *p2)
871 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
872 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
873 unsigned i;
875 if (vno1->opcode != vno2->opcode
876 || vno1->type != vno2->type)
877 return false;
879 for (i = 0; i < vno1->length; ++i)
880 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
881 return false;
883 return true;
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  Falls back to the valid
   table when the optimistic table has no entry.  */

tree
vn_nary_op_lookup (tree op)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  /* This also valueizes and canonicalizes vno1's operands.  */
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  The entry is obstack-allocated, sized to the actual
   operand count (the op[] array is trimmed from its maximum of 4).  */

void
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  /* Allocate only as many op[] slots as this operation uses.  */
  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  /* This also valueizes and canonicalizes vno1's operands.  */
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
}
941 /* Compute a hashcode for PHI operation VP1 and return it. */
943 static inline hashval_t
944 vn_phi_compute_hash (vn_phi_t vp1)
946 hashval_t result = 0;
947 int i;
948 tree phi1op;
950 result = vp1->block->index;
952 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
954 if (phi1op == VN_TOP)
955 continue;
956 result += iterative_hash_expr (phi1op, result);
959 return result;
962 /* Return the computed hashcode for phi operation P1. */
964 static hashval_t
965 vn_phi_hash (const void *p1)
967 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
968 return vp1->hashcode;
/* Compare two phi entries for equality, ignoring VN_TOP arguments.
   Phis in different blocks are never equal.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* Any phi in the same block will have it's arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  /* VN_TOP (the undefined value) matches anything.  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
/* Scratch vector reused across vn_phi_lookup calls.  */
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  Falls back to the valid
   table when the optimistic table has no entry.  */

static tree
vn_phi_lookup (tree phi)
{
  void **slot;
  struct vn_phi_s vp1;
  int i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = bb_for_stmt (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  Unlike the other insert functions, an existing slot may be
   overwritten (see comment below).  */

static void
vn_phi_insert (tree phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  int i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->phiargs = args;
  vp1->block = bb_for_stmt (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.*/
  *slot = vp1;
}
1066 /* Print set of components in strongly connected component SCC to OUT. */
1068 static void
1069 print_scc (FILE *out, VEC (tree, heap) *scc)
1071 tree var;
1072 unsigned int i;
1074 fprintf (out, "SCC consists of: ");
1075 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1077 print_generic_expr (out, var, 0);
1078 fprintf (out, " ");
1080 fprintf (out, "\n");
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  /* Never value-number to an SSA name involved in an abnormal phi;
     fall back to numbering FROM to itself.  */
  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");
    }

  currval = SSA_VAL (from);

  /* Only report a change for a genuinely different value; equal
     invariants (modulo OEP_PURE_SAME) do not count as a change.  */
  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      return true;
    }
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.

   A name whose value number is itself is "varying" for SCCVN; this
   is the conservative fallback for statements we cannot or will not
   value number more precisely.  */

static bool
defs_to_varying (tree stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  /* Walk every definition (real and virtual) of STMT.  */
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Forward declarations; both are used by visit_reference_op_load
   before their definitions below.  */
static bool expr_has_constants (tree expr);
static tree try_to_simplify (tree stmt, tree rhs);
1145 /* Visit a copy between LHS and RHS, return true if the value number
1146 changed. */
1148 static bool
1149 visit_copy (tree lhs, tree rhs)
1152 /* Follow chains of copies to their destination. */
1153 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1154 rhs = SSA_VAL (rhs);
1156 /* The copy may have a more interesting constant filled expression
1157 (we don't, since we know our RHS is just an SSA name). */
1158 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1159 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1161 return set_ssa_val_to (lhs, rhs);
1164 /* Visit a unary operator RHS, value number it, and return true if the
1165 value number of LHS has changed as a result. */
1167 static bool
1168 visit_unary_op (tree lhs, tree op)
1170 bool changed = false;
1171 tree result = vn_nary_op_lookup (op);
1173 if (result)
1175 changed = set_ssa_val_to (lhs, result);
1177 else
1179 changed = set_ssa_val_to (lhs, lhs);
1180 vn_nary_op_insert (op, lhs);
1183 return changed;
1186 /* Visit a binary operator RHS, value number it, and return true if the
1187 value number of LHS has changed as a result. */
1189 static bool
1190 visit_binary_op (tree lhs, tree op)
1192 bool changed = false;
1193 tree result = vn_nary_op_lookup (op);
1195 if (result)
1197 changed = set_ssa_val_to (lhs, result);
1199 else
1201 changed = set_ssa_val_to (lhs, lhs);
1202 vn_nary_op_insert (op, lhs);
1205 return changed;
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, tree stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if (stmt
	  && !is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	{
	  tree tem = try_to_simplify (stmt, val);
	  if (tem)
	    val = tem;
	}
      result = val;
      /* If simplification did not reduce to a constant or name, see
	 whether the conversion expression itself is already known.  */
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      /* Propagate the constant-filled expression so later
	 simplifications can combine through this load.  */
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      /* Unknown load: LHS is its own value number; remember the
	 reference so later identical loads find it.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, tree stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false);

  if (result)
    {
      /* Compare value numbers, not names, so equivalent stores
	 through different names still match.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	{
	  tree def = DEF_FROM_PTR (var);
	  tree use;

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	    use = def;
	  else
	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
	}
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (tree phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  int i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  /* First non-TOP argument seen becomes the candidate.  */
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      /* An SSA name value is treated as a copy so chains are
	 followed; an invariant is recorded directly.  */
      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      /* No congruent PHI known: the result is its own value number;
	 record this PHI for later congruence lookups.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
1476 /* Return true if EXPR contains constants. */
1478 static bool
1479 expr_has_constants (tree expr)
1481 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1483 case tcc_unary:
1484 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1486 case tcc_binary:
1487 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1488 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1489 /* Constants inside reference ops are rarely interesting, but
1490 it can take a lot of looking to find them. */
1491 case tcc_reference:
1492 case tcc_declaration:
1493 return false;
1494 default:
1495 return is_gimple_min_invariant (expr);
1497 return false;
1500 /* Replace SSA_NAMES in expr with their value numbers, and return the
1501 result.
1502 This is performed in place. */
1504 static tree
1505 valueize_expr (tree expr)
1507 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1509 case tcc_unary:
1510 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1511 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1512 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1513 break;
1514 case tcc_binary:
1515 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1516 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1517 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1518 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1519 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1520 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1521 break;
1522 default:
1523 break;
1525 return expr;
/* Simplify the binary expression RHS, and return the result if
   simplified.  STMT is the statement RHS belongs to, used for
   overflow-warning bookkeeping.  Returns NULL_TREE when no
   worthwhile simplification was found.  */

static tree
simplify_binary_expression (tree stmt, tree rhs)
{
  tree result = NULL_TREE;
  tree op0 = TREE_OPERAND (rhs, 0);
  tree op1 = TREE_OPERAND (rhs, 1);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
	op0 = valueize_expr (VN_INFO (op0)->expr);
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (VN_INFO (op1)->expr);
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == TREE_OPERAND (rhs, 0)
      && op1 == TREE_OPERAND (rhs, 1))
    return NULL_TREE;

  /* Defer overflow warnings: only emit them if the folded result is
     actually usable as a GIMPLE expression.  */
  fold_defer_overflow_warnings ();

  result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);

  fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_expression_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  Returns RHS unchanged when no simplification
   applies.  */

static tree
simplify_unary_expression (tree rhs)
{
  tree result = NULL_TREE;
  tree op0 = TREE_OPERAND (rhs, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (VN_INFO (op0)->expr);
  else if (CONVERT_EXPR_P (rhs)
	   || TREE_CODE (rhs) == REALPART_EXPR
	   || TREE_CODE (rhs) == IMAGPART_EXPR
	   || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (VN_INFO (op0)->expr);
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == TREE_OPERAND (rhs, 0))
    return rhs;

  result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      /* Only accept results that are valid in GIMPLE, to avoid
	 re-introducing arbitrarily complex trees.  */
      if (valid_gimple_expression_p (result))
	return result;
    }

  return rhs;
}
/* Try to simplify RHS using equivalences and constant folding.
   STMT is the statement RHS belongs to.  Returns the (possibly
   unchanged) RHS.  */

static tree
try_to_simplify (tree stmt, tree rhs)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return rhs;

  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
    {
    case tcc_declaration:
      /* A read of a constant-initialized symbol folds to that
	 constant.  */
      tem = get_symbol_constant_value (rhs);
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (rhs);
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (rhs) == REALPART_EXPR
	    || TREE_CODE (rhs) == IMAGPART_EXPR
	    || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (rhs);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt, rhs);
      break;
    default:
      break;
    }

  return rhs;
}
/* Visit and value number USE, return true if the value number
   changed.

   This is the central dispatcher of the algorithm: it classifies
   the defining statement of USE and routes it to the appropriate
   visit_* worker, attempting simplification first.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  tree stmt = SSA_NAME_DEF_STMT (use);
  stmt_ann_t ann;

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !IS_EMPTY_STMT (stmt))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_generic_stmt (dump_file, stmt, 0);
    }

  /* RETURN_EXPR may have an embedded MODIFY_STMT.  */
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
    stmt = TREE_OPERAND (stmt, 0);

  ann = stmt_ann (stmt);

  /* Handle uninitialized uses.  */
  if (IS_EMPTY_STMT (stmt))
    {
      changed = set_ssa_val_to (use, use);
    }
  else
    {
      if (TREE_CODE (stmt) == PHI_NODE)
	{
	  changed = visit_phi (stmt);
	}
      else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	       || (ann && ann->has_volatile_ops)
	       || tree_could_throw_p (stmt))
	{
	  /* Volatile or throwing statements are never value
	     numbered; their defs vary.  */
	  changed = defs_to_varying (stmt);
	}
      else
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree simplified;

	  STRIP_USELESS_TYPE_CONVERSION (rhs);

	  /* Shortcut for copies. Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, rhs);
	      goto done;
	    }
	  simplified = try_to_simplify (stmt, rhs);
	  if (simplified && simplified != rhs)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME
	      && simplified != rhs)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valuizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	      rhs = simplified;
	    }
	  else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->has_constants = true;
	      VN_INFO (lhs)->expr = unshare_expr (rhs);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating. They may become non-constant in this case,
		 even if they were optimistically constant. */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = lhs;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      /* We can substitute SSA_NAMEs that are live over
		 abnormal edges with their constant value.  */
	      && !is_gimple_min_invariant (rhs)
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    {
	      changed = visit_reference_op_store (lhs, rhs, stmt);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (is_gimple_min_invariant (rhs))
		{
		  VN_INFO (lhs)->has_constants = true;
		  VN_INFO (lhs)->expr = rhs;
		  changed = set_ssa_val_to (lhs, rhs);
		}
	      else
		{
		  /* Dispatch on the shape of the RHS.  */
		  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
		    {
		    case tcc_unary:
		      changed = visit_unary_op (lhs, rhs);
		      break;
		    case tcc_binary:
		      changed = visit_binary_op (lhs, rhs);
		      break;
		      /* If tcc_vl_expr ever encompasses more than
			 CALL_EXPR, this will need to be changed.  */
		    case tcc_vl_exp:
		      /* Only pure/const calls can be value numbered
			 like loads; anything else has arbitrary
			 effects.  */
		      if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
			changed = visit_reference_op_load (lhs, rhs, stmt);
		      else
			changed = defs_to_varying (stmt);
		      break;
		    case tcc_declaration:
		    case tcc_reference:
		      changed = visit_reference_op_load (lhs, rhs, stmt);
		      break;
		    case tcc_expression:
		      if (TREE_CODE (rhs) == ADDR_EXPR)
			{
			  changed = visit_unary_op (lhs, rhs);
			  goto done;
			}
		      /* Fallthrough.  */
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
    }
 done:
  return changed;
}
1857 /* Compare two operands by reverse postorder index */
1859 static int
1860 compare_ops (const void *pa, const void *pb)
1862 const tree opa = *((const tree *)pa);
1863 const tree opb = *((const tree *)pb);
1864 tree opstmta = SSA_NAME_DEF_STMT (opa);
1865 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1866 basic_block bba;
1867 basic_block bbb;
1869 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1870 return 0;
1871 else if (IS_EMPTY_STMT (opstmta))
1872 return -1;
1873 else if (IS_EMPTY_STMT (opstmtb))
1874 return 1;
1876 bba = bb_for_stmt (opstmta);
1877 bbb = bb_for_stmt (opstmtb);
1879 if (!bba && !bbb)
1880 return 0;
1881 else if (!bba)
1882 return -1;
1883 else if (!bbb)
1884 return 1;
1886 if (bba == bbb)
1888 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1889 return 0;
1890 else if (TREE_CODE (opstmta) == PHI_NODE)
1891 return -1;
1892 else if (TREE_CODE (opstmtb) == PHI_NODE)
1893 return 1;
1894 return gimple_stmt_uid (opstmta) - gimple_stmt_uid (opstmtb);
1896 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1899 /* Sort an array containing members of a strongly connected component
1900 SCC so that the members are ordered by RPO number.
1901 This means that when the sort is complete, iterating through the
1902 array will give you the members in RPO order. */
1904 static void
1905 sort_scc (VEC (tree, heap) *scc)
1907 qsort (VEC_address (tree, scc),
1908 VEC_length (tree, scc),
1909 sizeof (tree),
1910 compare_ops);
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */

  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* The optimistic table may contain results that are stale
	     after the previous iteration, so clear its hash tables
	     and the storage backing their entries before revisiting
	     the SCC.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}
/* Declare a heap-allocated vector of ssa_op_iter objects; DFS below
   uses it as an explicit stack of use-iterator states to avoid
   recursion.  */
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  NAME is the SCC root, so everything above it on
     the stack (inclusive) belongs to this component.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  /* Visit members in RPO order so straight-line chains converge in
     one pass.  */
  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.

   This is Tarjan's SCC algorithm made iterative: instead of
   recursing, the current (NAME, iterator) state is pushed onto
   NAMEVEC/ITERVEC and control jumps back to START_OVER; returning
   from a "call" jumps to CONTINUE_WALKING.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  tree defstmt, use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!IS_EMPTY_STMT (defstmt))
    {
      /* Push a new iterator.  */
      if (TREE_CODE (defstmt) == PHI_NODE)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    /* Default definition: nothing to walk.  */
    iter.done = true;

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  NAME is a root exactly when its
	     low-link equals its own DFS number.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      /* Back from the "recursive call" on USE: fold its
		 low-link into ours.  */
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      /* USE is an ancestor still on the stack: a back edge.  */
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
2108 /* Allocate a value number table. */
2110 static void
2111 allocate_vn_table (vn_tables_t table)
2113 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2114 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2115 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2116 free_reference);
2118 gcc_obstack_init (&table->nary_obstack);
2119 table->phis_pool = create_alloc_pool ("VN phis",
2120 sizeof (struct vn_phi_s),
2121 30);
2122 table->references_pool = create_alloc_pool ("VN references",
2123 sizeof (struct vn_reference_s),
2124 30);
2127 /* Free a value number table. */
2129 static void
2130 free_vn_table (vn_tables_t table)
2132 htab_delete (table->phis);
2133 htab_delete (table->nary);
2134 htab_delete (table->references);
2135 obstack_free (&table->nary_obstack, NULL);
2136 free_alloc_pool (table->phis_pool);
2137 free_alloc_pool (table->references_pool);
/* Initialize all global state used by SCCVN: per-name aux info, RPO
   numbers, VN_TOP, and the valid/optimistic tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  next_dfs_num = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  /* VN_TOP is a distinguished tree node used as the "unknown
     everything-equivalent" lattice value.  */
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = name;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  pre_info = NULL;
}
2196 void
2197 switch_to_PRE_table (void)
2199 pre_info = XCNEW (struct vn_tables_s);
2200 allocate_vn_table (pre_info);
2201 current_info = pre_info;
/* Free all global state allocated by init_scc_vn and the value
   numbering run, including any SSA names we inserted.  */

void
free_scc_vn (void)
{
  size_t i;

  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      /* Clear stale value handles left on names.  */
      if (name
	  && SSA_NAME_VALUE (name)
	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
	SSA_NAME_VALUE (name) = NULL;
      /* Names created by visit_reference_op_load that were never
	 used by an inserted expression must be released.  */
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
  /* The PRE table only exists if switch_to_PRE_table was called.  */
  if (pre_info)
    {
      free_vn_table (pre_info);
      XDELETE (pre_info);
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  MAY_INSERT_ARG controls whether we
   are allowed to insert new SSA names for partial expressions
   (see visit_reference_op_load).  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  /* Incoming parameters are their own value numbers, not VN_TOP.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  SSA_VAL (def) = def;
	}
    }

  /* Walk all names not yet swept into an SCC.  Names with zero uses
     contribute nothing and are skipped.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    /* Resource limit hit; tear everything down and report
	       failure to the caller.  */
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name && VN_INFO (name)->visited
	      && (SSA_VAL (name) != name
		  || is_gimple_min_invariant (VN_INFO (name)->expr)))
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      if (is_gimple_min_invariant (VN_INFO (name)->expr))
		print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
	      else
		print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  may_insert = false;
  return true;
}