UUID stuff from Ryan Raasch
[cegcc.git] / cegcc / src / gcc-4.3.2 / gcc / tree-ssa-sccvn.c
blob34caa133882e071269974dc68f0f701b39c28039
/* SCC value numbering for trees
   Copyright (C) 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t unary;
111 htab_t binary;
112 htab_t phis;
113 htab_t references;
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
118 } *vn_tables_t;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
123 repeatedly. */
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
128 hashval_t hashcode;
129 tree type;
130 tree op0;
131 tree op1;
132 tree result;
133 } *vn_binary_op_t;
134 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
136 /* Unary operations in the hashtable consist of a single operand, an
137 opcode, and a type. Result is the value number of the operation,
138 and hashcode is stored to avoid having to calculate it repeatedly. */
140 typedef struct vn_unary_op_s
142 enum tree_code opcode;
143 hashval_t hashcode;
144 tree type;
145 tree op0;
146 tree result;
147 } *vn_unary_op_t;
148 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
150 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
151 arguments, and the basic block the phi is in. Result is the value
152 number of the operation, and hashcode is stored to avoid having to
153 calculate it repeatedly. Phi nodes not in the same block are never
154 considered equivalent. */
156 typedef struct vn_phi_s
158 VEC (tree, heap) *phiargs;
159 basic_block block;
160 hashval_t hashcode;
161 tree result;
162 } *vn_phi_t;
163 typedef const struct vn_phi_s *const_vn_phi_t;
165 /* Reference operands only exist in reference operations structures.
166 They consist of an opcode, type, and some number of operands. For
167 a given opcode, some, all, or none of the operands may be used.
168 The operands are there to store the information that makes up the
169 portion of the addressing calculation that opcode performs. */
171 typedef struct vn_reference_op_struct
173 enum tree_code opcode;
174 tree type;
175 tree op0;
176 tree op1;
177 tree op2;
178 } vn_reference_op_s;
179 typedef vn_reference_op_s *vn_reference_op_t;
180 typedef const vn_reference_op_s *const_vn_reference_op_t;
182 DEF_VEC_O(vn_reference_op_s);
183 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
185 /* A reference operation in the hashtable is representation as a
186 collection of vuses, representing the memory state at the time of
187 the operation, and a collection of operands that make up the
188 addressing calculation. If two vn_reference_t's have the same set
189 of operands, they access the same memory location. We also store
190 the resulting value number, and the hashcode. The vuses are
191 always stored in order sorted by ssa name version. */
193 typedef struct vn_reference_s
195 VEC (tree, gc) *vuses;
196 VEC (vn_reference_op_s, heap) *operands;
197 hashval_t hashcode;
198 tree result;
199 } *vn_reference_t;
200 typedef const struct vn_reference_s *const_vn_reference_t;
202 /* Valid hashtables storing information we have proven to be
203 correct. */
205 static vn_tables_t valid_info;
207 /* Optimistic hashtables storing information we are making assumptions about
208 during iterations. */
210 static vn_tables_t optimistic_info;
212 /* PRE hashtables storing information about mapping from expressions to
213 value handles. */
215 static vn_tables_t pre_info;
217 /* Pointer to the set of hashtables that is currently being used.
218 Should always point to either the optimistic_info, or the
219 valid_info. */
221 static vn_tables_t current_info;
224 /* Reverse post order index for each basic block. */
226 static int *rpo_numbers;
228 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
230 /* This represents the top of the VN lattice, which is the universal
231 value. */
233 tree VN_TOP;
235 /* Next DFS number and the stack for strongly connected component
236 detection. */
238 static unsigned int next_dfs_num;
239 static VEC (tree, heap) *sccstack;
241 DEF_VEC_P(vn_ssa_aux_t);
242 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
244 /* Table of vn_ssa_aux_t's, one per ssa_name. */
246 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
248 /* Return the value numbering information for a given SSA name. */
250 vn_ssa_aux_t
251 VN_INFO (tree name)
253 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
254 SSA_NAME_VERSION (name));
257 /* Set the value numbering info for a given SSA name to a given
258 value. */
260 static inline void
261 VN_INFO_SET (tree name, vn_ssa_aux_t value)
263 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
264 SSA_NAME_VERSION (name), value);
267 /* Get the value numbering info for a given SSA name, creating it if
268 it does not exist. */
270 vn_ssa_aux_t
271 VN_INFO_GET (tree name)
273 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
274 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
275 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
276 SSA_NAME_VERSION (name) + 1);
277 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
278 SSA_NAME_VERSION (name), newinfo);
279 return newinfo;
283 /* Free a phi operation structure VP. */
285 static void
286 free_phi (void *vp)
288 vn_phi_t phi = vp;
289 VEC_free (tree, heap, phi->phiargs);
292 /* Free a reference operation structure VP. */
294 static void
295 free_reference (void *vp)
297 vn_reference_t vr = vp;
298 VEC_free (vn_reference_op_s, heap, vr->operands);
301 /* Compare two reference operands P1 and P2 for equality. return true if
302 they are equal, and false otherwise. */
304 static int
305 vn_reference_op_eq (const void *p1, const void *p2)
307 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
308 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
309 return vro1->opcode == vro2->opcode
310 && vro1->type == vro2->type
311 && expressions_equal_p (vro1->op0, vro2->op0)
312 && expressions_equal_p (vro1->op1, vro2->op1)
313 && expressions_equal_p (vro1->op2, vro2->op2);
316 /* Compute the hash for a reference operand VRO1 */
318 static hashval_t
319 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
321 return iterative_hash_expr (vro1->op0, vro1->opcode)
322 + iterative_hash_expr (vro1->op1, vro1->opcode)
323 + iterative_hash_expr (vro1->op2, vro1->opcode);
326 /* Return the hashcode for a given reference operation P1. */
328 static hashval_t
329 vn_reference_hash (const void *p1)
331 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
332 return vr1->hashcode;
335 /* Compute a hash for the reference operation VR1 and return it. */
337 static inline hashval_t
338 vn_reference_compute_hash (const vn_reference_t vr1)
340 hashval_t result = 0;
341 tree v;
342 int i;
343 vn_reference_op_t vro;
345 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
346 result += iterative_hash_expr (v, 0);
347 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
348 result += vn_reference_op_compute_hash (vro);
350 return result;
353 /* Return true if reference operations P1 and P2 are equivalent. This
354 means they have the same set of operands and vuses. */
356 static int
357 vn_reference_eq (const void *p1, const void *p2)
359 tree v;
360 int i;
361 vn_reference_op_t vro;
363 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
364 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
366 if (vr1->vuses == vr2->vuses
367 && vr1->operands == vr2->operands)
368 return true;
370 /* Impossible for them to be equivalent if they have different
371 number of vuses. */
372 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
373 return false;
375 /* We require that address operands be canonicalized in a way that
376 two memory references will have the same operands if they are
377 equivalent. */
378 if (VEC_length (vn_reference_op_s, vr1->operands)
379 != VEC_length (vn_reference_op_s, vr2->operands))
380 return false;
382 /* The memory state is more often different than the address of the
383 store/load, so check it first. */
384 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
386 if (VEC_index (tree, vr2->vuses, i) != v)
387 return false;
390 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
392 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
393 vro))
394 return false;
396 return true;
399 /* Place the vuses from STMT into *result */
401 static inline void
402 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
404 ssa_op_iter iter;
405 tree vuse;
407 if (!stmt)
408 return;
410 VEC_reserve_exact (tree, gc, *result,
411 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
413 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
414 VEC_quick_push (tree, *result, vuse);
418 /* Copy the VUSE names in STMT into a vector, and return
419 the vector. */
421 VEC (tree, gc) *
422 copy_vuses_from_stmt (tree stmt)
424 VEC (tree, gc) *vuses = NULL;
426 vuses_to_vec (stmt, &vuses);
428 return vuses;
431 /* Place the vdefs from STMT into *result */
433 static inline void
434 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
436 ssa_op_iter iter;
437 tree vdef;
439 if (!stmt)
440 return;
442 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
444 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
445 VEC_quick_push (tree, *result, vdef);
448 /* Copy the names of vdef results in STMT into a vector, and return
449 the vector. */
451 static VEC (tree, gc) *
452 copy_vdefs_from_stmt (tree stmt)
454 VEC (tree, gc) *vdefs = NULL;
456 vdefs_to_vec (stmt, &vdefs);
458 return vdefs;
461 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
462 static VEC (tree, gc) *shared_lookup_vops;
464 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
465 This function will overwrite the current SHARED_LOOKUP_VOPS
466 variable. */
468 VEC (tree, gc) *
469 shared_vuses_from_stmt (tree stmt)
471 VEC_truncate (tree, shared_lookup_vops, 0);
472 vuses_to_vec (stmt, &shared_lookup_vops);
474 return shared_lookup_vops;
477 /* Copy the operations present in load/store/call REF into RESULT, a vector of
478 vn_reference_op_s's. */
480 static void
481 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
483 /* Calls are different from all other reference operations. */
484 if (TREE_CODE (ref) == CALL_EXPR)
486 vn_reference_op_s temp;
487 tree callfn;
488 call_expr_arg_iterator iter;
489 tree callarg;
491 /* Copy the call_expr opcode, type, function being called, and
492 arguments. */
493 memset (&temp, 0, sizeof (temp));
494 temp.type = TREE_TYPE (ref);
495 temp.opcode = CALL_EXPR;
496 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
498 callfn = get_callee_fndecl (ref);
499 if (!callfn)
500 callfn = CALL_EXPR_FN (ref);
501 temp.type = TREE_TYPE (callfn);
502 temp.opcode = TREE_CODE (callfn);
503 temp.op0 = callfn;
504 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
506 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
508 memset (&temp, 0, sizeof (temp));
509 temp.type = TREE_TYPE (callarg);
510 temp.opcode = TREE_CODE (callarg);
511 temp.op0 = callarg;
512 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
514 return;
517 /* For non-calls, store the information that makes up the address. */
519 while (ref)
521 vn_reference_op_s temp;
523 memset (&temp, 0, sizeof (temp));
524 temp.type = TREE_TYPE (ref);
525 temp.opcode = TREE_CODE (ref);
527 switch (temp.opcode)
529 case ALIGN_INDIRECT_REF:
530 case MISALIGNED_INDIRECT_REF:
531 case INDIRECT_REF:
532 /* The only operand is the address, which gets its own
533 vn_reference_op_s structure. */
534 break;
535 case BIT_FIELD_REF:
536 /* Record bits and position. */
537 temp.op0 = TREE_OPERAND (ref, 1);
538 temp.op1 = TREE_OPERAND (ref, 2);
539 break;
540 case COMPONENT_REF:
541 /* Record field as operand. */
542 temp.op0 = TREE_OPERAND (ref, 1);
543 temp.op1 = TREE_OPERAND (ref, 2);
544 break;
545 case ARRAY_RANGE_REF:
546 case ARRAY_REF:
547 /* Record index as operand. */
548 temp.op0 = TREE_OPERAND (ref, 1);
549 temp.op1 = TREE_OPERAND (ref, 2);
550 temp.op2 = TREE_OPERAND (ref, 3);
551 break;
552 case STRING_CST:
553 case INTEGER_CST:
554 case COMPLEX_CST:
555 case VECTOR_CST:
556 case REAL_CST:
557 case CONSTRUCTOR:
558 case VALUE_HANDLE:
559 case VAR_DECL:
560 case PARM_DECL:
561 case CONST_DECL:
562 case RESULT_DECL:
563 case SSA_NAME:
564 temp.op0 = ref;
565 break;
566 /* These are only interesting for their operands, their
567 existence, and their type. They will never be the last
568 ref in the chain of references (IE they require an
569 operand), so we don't have to put anything
570 for op* as it will be handled by the iteration */
571 case IMAGPART_EXPR:
572 case REALPART_EXPR:
573 case VIEW_CONVERT_EXPR:
574 case ADDR_EXPR:
575 break;
576 default:
577 gcc_unreachable ();
580 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
582 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
583 ref = TREE_OPERAND (ref, 0);
584 else
585 ref = NULL_TREE;
589 /* Create a vector of vn_reference_op_s structures from REF, a
590 REFERENCE_CLASS_P tree. The vector is not shared. */
592 static VEC(vn_reference_op_s, heap) *
593 create_reference_ops_from_ref (tree ref)
595 VEC (vn_reference_op_s, heap) *result = NULL;
597 copy_reference_ops_from_ref (ref, &result);
598 return result;
601 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
603 /* Create a vector of vn_reference_op_s structures from REF, a
604 REFERENCE_CLASS_P tree. The vector is shared among all callers of
605 this function. */
607 static VEC(vn_reference_op_s, heap) *
608 shared_reference_ops_from_ref (tree ref)
610 if (!ref)
611 return NULL;
612 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
613 copy_reference_ops_from_ref (ref, &shared_lookup_references);
614 return shared_lookup_references;
618 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
619 structures into their value numbers. This is done in-place, and
620 the vector passed in is returned. */
622 static VEC (vn_reference_op_s, heap) *
623 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
625 vn_reference_op_t vro;
626 int i;
628 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
630 if (vro->opcode == SSA_NAME
631 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
632 vro->op0 = SSA_VAL (vro->op0);
635 return orig;
638 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
639 their value numbers. This is done in-place, and the vector passed
640 in is returned. */
642 static VEC (tree, gc) *
643 valueize_vuses (VEC (tree, gc) *orig)
645 bool made_replacement = false;
646 tree vuse;
647 int i;
649 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
651 if (vuse != SSA_VAL (vuse))
653 made_replacement = true;
654 VEC_replace (tree, orig, i, SSA_VAL (vuse));
658 if (made_replacement && VEC_length (tree, orig) > 1)
659 sort_vuses (orig);
661 return orig;
664 /* Lookup OP in the current hash table, and return the resulting
665 value number if it exists in the hash table. Return NULL_TREE if
666 it does not exist in the hash table. */
668 tree
669 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
671 void **slot;
672 struct vn_reference_s vr1;
674 vr1.vuses = valueize_vuses (vuses);
675 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
676 vr1.hashcode = vn_reference_compute_hash (&vr1);
677 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
678 NO_INSERT);
679 if (!slot && current_info == optimistic_info)
680 slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
681 NO_INSERT);
682 if (!slot)
683 return NULL_TREE;
685 return ((vn_reference_t)*slot)->result;
688 /* Insert OP into the current hash table with a value number of
689 RESULT. */
691 void
692 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
694 void **slot;
695 vn_reference_t vr1;
697 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
699 vr1->vuses = valueize_vuses (vuses);
700 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
701 vr1->hashcode = vn_reference_compute_hash (vr1);
702 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
704 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
705 INSERT);
707 /* Because we lookup stores using vuses, and value number failures
708 using the vdefs (see visit_reference_op_store for how and why),
709 it's possible that on failure we may try to insert an already
710 inserted store. This is not wrong, there is no ssa name for a
711 store that we could use as a differentiator anyway. Thus, unlike
712 the other lookup functions, you cannot gcc_assert (!*slot)
713 here. */
715 /* But free the old slot in case of a collision. */
716 if (*slot)
717 free_reference (*slot);
719 *slot = vr1;
723 /* Return the stored hashcode for a unary operation. */
725 static hashval_t
726 vn_unary_op_hash (const void *p1)
728 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
729 return vuo1->hashcode;
732 /* Hash a unary operation P1 and return the result. */
734 static inline hashval_t
735 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
737 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
740 /* Return true if P1 and P2, two unary operations, are equivalent. */
742 static int
743 vn_unary_op_eq (const void *p1, const void *p2)
745 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
746 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
747 return vuo1->opcode == vuo2->opcode
748 && vuo1->type == vuo2->type
749 && expressions_equal_p (vuo1->op0, vuo2->op0);
752 /* Lookup OP in the current hash table, and return the resulting
753 value number if it exists in the hash table. Return NULL_TREE if
754 it does not exist in the hash table. */
756 tree
757 vn_unary_op_lookup (tree op)
759 void **slot;
760 struct vn_unary_op_s vuo1;
762 vuo1.opcode = TREE_CODE (op);
763 vuo1.type = TREE_TYPE (op);
764 vuo1.op0 = TREE_OPERAND (op, 0);
766 if (TREE_CODE (vuo1.op0) == SSA_NAME)
767 vuo1.op0 = SSA_VAL (vuo1.op0);
769 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
770 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
771 NO_INSERT);
772 if (!slot && current_info == optimistic_info)
773 slot = htab_find_slot_with_hash (valid_info->unary, &vuo1, vuo1.hashcode,
774 NO_INSERT);
775 if (!slot)
776 return NULL_TREE;
777 return ((vn_unary_op_t)*slot)->result;
780 /* Insert OP into the current hash table with a value number of
781 RESULT. */
783 void
784 vn_unary_op_insert (tree op, tree result)
786 void **slot;
787 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
789 vuo1->opcode = TREE_CODE (op);
790 vuo1->type = TREE_TYPE (op);
791 vuo1->op0 = TREE_OPERAND (op, 0);
792 vuo1->result = result;
794 if (TREE_CODE (vuo1->op0) == SSA_NAME)
795 vuo1->op0 = SSA_VAL (vuo1->op0);
797 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
798 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
799 INSERT);
800 gcc_assert (!*slot);
801 *slot = vuo1;
804 /* Compute and return the hash value for binary operation VBO1. */
806 static inline hashval_t
807 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
809 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
810 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
813 /* Return the computed hashcode for binary operation P1. */
815 static hashval_t
816 vn_binary_op_hash (const void *p1)
818 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
819 return vbo1->hashcode;
822 /* Compare binary operations P1 and P2 and return true if they are
823 equivalent. */
825 static int
826 vn_binary_op_eq (const void *p1, const void *p2)
828 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
829 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
830 return vbo1->opcode == vbo2->opcode
831 && vbo1->type == vbo2->type
832 && expressions_equal_p (vbo1->op0, vbo2->op0)
833 && expressions_equal_p (vbo1->op1, vbo2->op1);
836 /* Lookup OP in the current hash table, and return the resulting
837 value number if it exists in the hash table. Return NULL_TREE if
838 it does not exist in the hash table. */
840 tree
841 vn_binary_op_lookup (tree op)
843 void **slot;
844 struct vn_binary_op_s vbo1;
846 vbo1.opcode = TREE_CODE (op);
847 vbo1.type = TREE_TYPE (op);
848 vbo1.op0 = TREE_OPERAND (op, 0);
849 vbo1.op1 = TREE_OPERAND (op, 1);
851 if (TREE_CODE (vbo1.op0) == SSA_NAME)
852 vbo1.op0 = SSA_VAL (vbo1.op0);
853 if (TREE_CODE (vbo1.op1) == SSA_NAME)
854 vbo1.op1 = SSA_VAL (vbo1.op1);
856 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
857 && commutative_tree_code (vbo1.opcode))
859 tree temp = vbo1.op0;
860 vbo1.op0 = vbo1.op1;
861 vbo1.op1 = temp;
864 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
865 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
866 NO_INSERT);
867 if (!slot && current_info == optimistic_info)
868 slot = htab_find_slot_with_hash (valid_info->binary, &vbo1, vbo1.hashcode,
869 NO_INSERT);
870 if (!slot)
871 return NULL_TREE;
872 return ((vn_binary_op_t)*slot)->result;
875 /* Insert OP into the current hash table with a value number of
876 RESULT. */
878 void
879 vn_binary_op_insert (tree op, tree result)
881 void **slot;
882 vn_binary_op_t vbo1;
883 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
885 vbo1->opcode = TREE_CODE (op);
886 vbo1->type = TREE_TYPE (op);
887 vbo1->op0 = TREE_OPERAND (op, 0);
888 vbo1->op1 = TREE_OPERAND (op, 1);
889 vbo1->result = result;
891 if (TREE_CODE (vbo1->op0) == SSA_NAME)
892 vbo1->op0 = SSA_VAL (vbo1->op0);
893 if (TREE_CODE (vbo1->op1) == SSA_NAME)
894 vbo1->op1 = SSA_VAL (vbo1->op1);
896 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
897 && commutative_tree_code (vbo1->opcode))
899 tree temp = vbo1->op0;
900 vbo1->op0 = vbo1->op1;
901 vbo1->op1 = temp;
903 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
904 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
905 INSERT);
906 gcc_assert (!*slot);
908 *slot = vbo1;
911 /* Compute a hashcode for PHI operation VP1 and return it. */
913 static inline hashval_t
914 vn_phi_compute_hash (vn_phi_t vp1)
916 hashval_t result = 0;
917 int i;
918 tree phi1op;
920 result = vp1->block->index;
922 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
924 if (phi1op == VN_TOP)
925 continue;
926 result += iterative_hash_expr (phi1op, result);
929 return result;
932 /* Return the computed hashcode for phi operation P1. */
934 static hashval_t
935 vn_phi_hash (const void *p1)
937 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
938 return vp1->hashcode;
941 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
943 static int
944 vn_phi_eq (const void *p1, const void *p2)
946 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
947 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
949 if (vp1->block == vp2->block)
951 int i;
952 tree phi1op;
954 /* Any phi in the same block will have it's arguments in the
955 same edge order, because of how we store phi nodes. */
956 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
958 tree phi2op = VEC_index (tree, vp2->phiargs, i);
959 if (phi1op == VN_TOP || phi2op == VN_TOP)
960 continue;
961 if (!expressions_equal_p (phi1op, phi2op))
962 return false;
964 return true;
966 return false;
969 static VEC(tree, heap) *shared_lookup_phiargs;
971 /* Lookup PHI in the current hash table, and return the resulting
972 value number if it exists in the hash table. Return NULL_TREE if
973 it does not exist in the hash table. */
975 static tree
976 vn_phi_lookup (tree phi)
978 void **slot;
979 struct vn_phi_s vp1;
980 int i;
982 VEC_truncate (tree, shared_lookup_phiargs, 0);
984 /* Canonicalize the SSA_NAME's to their value number. */
985 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
987 tree def = PHI_ARG_DEF (phi, i);
988 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
989 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
991 vp1.phiargs = shared_lookup_phiargs;
992 vp1.block = bb_for_stmt (phi);
993 vp1.hashcode = vn_phi_compute_hash (&vp1);
994 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
995 NO_INSERT);
996 if (!slot && current_info == optimistic_info)
997 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
998 NO_INSERT);
999 if (!slot)
1000 return NULL_TREE;
1001 return ((vn_phi_t)*slot)->result;
1004 /* Insert PHI into the current hash table with a value number of
1005 RESULT. */
1007 static void
1008 vn_phi_insert (tree phi, tree result)
1010 void **slot;
1011 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1012 int i;
1013 VEC (tree, heap) *args = NULL;
1015 /* Canonicalize the SSA_NAME's to their value number. */
1016 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1018 tree def = PHI_ARG_DEF (phi, i);
1019 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1020 VEC_safe_push (tree, heap, args, def);
1022 vp1->phiargs = args;
1023 vp1->block = bb_for_stmt (phi);
1024 vp1->result = result;
1025 vp1->hashcode = vn_phi_compute_hash (vp1);
1027 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1028 INSERT);
1030 /* Because we iterate over phi operations more than once, it's
1031 possible the slot might already exist here, hence no assert.*/
1032 *slot = vp1;
1036 /* Print set of components in strongly connected component SCC to OUT. */
1038 static void
1039 print_scc (FILE *out, VEC (tree, heap) *scc)
1041 tree var;
1042 unsigned int i;
1044 fprintf (out, "SCC consists of: ");
1045 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1047 print_generic_expr (out, var, 0);
1048 fprintf (out, " ");
1050 fprintf (out, "\n");
1053 /* Set the value number of FROM to TO, return true if it has changed
1054 as a result. */
1056 static inline bool
1057 set_ssa_val_to (tree from, tree to)
1059 tree currval;
1061 if (from != to
1062 && TREE_CODE (to) == SSA_NAME
1063 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1064 to = from;
1066 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1067 and invariants. So assert that here. */
1068 gcc_assert (to != NULL_TREE
1069 && (to == VN_TOP
1070 || TREE_CODE (to) == SSA_NAME
1071 || is_gimple_min_invariant (to)));
1073 if (dump_file && (dump_flags & TDF_DETAILS))
1075 fprintf (dump_file, "Setting value number of ");
1076 print_generic_expr (dump_file, from, 0);
1077 fprintf (dump_file, " to ");
1078 print_generic_expr (dump_file, to, 0);
1079 fprintf (dump_file, "\n");
1082 currval = SSA_VAL (from);
1084 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1086 SSA_VAL (from) = to;
1087 return true;
1089 return false;
1092 /* Set all definitions in STMT to value number to themselves.
1093 Return true if a value number changed. */
1095 static bool
1096 defs_to_varying (tree stmt)
1098 bool changed = false;
1099 ssa_op_iter iter;
1100 def_operand_p defp;
1102 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1104 tree def = DEF_FROM_PTR (defp);
1106 VN_INFO (def)->use_processed = true;
1107 changed |= set_ssa_val_to (def, def);
1109 return changed;
1112 /* Visit a copy between LHS and RHS, return true if the value number
1113 changed. */
1115 static bool
1116 visit_copy (tree lhs, tree rhs)
1119 /* Follow chains of copies to their destination. */
1120 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1121 rhs = SSA_VAL (rhs);
1123 /* The copy may have a more interesting constant filled expression
1124 (we don't, since we know our RHS is just an SSA name). */
1125 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1126 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1128 return set_ssa_val_to (lhs, rhs);
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, tree op)
{
  bool changed = false;
  tree result = vn_unary_op_lookup (op);

  if (result)
    {
      /* Seen before: LHS gets the existing value number.  */
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      /* First occurrence: value number LHS to itself, then record
	 OP -> LHS in the current table.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_unary_op_insert (op, lhs);
    }

  return changed;
}
/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, tree op)
{
  bool changed = false;
  tree result = vn_binary_op_lookup (op);

  if (result)
    {
      /* Seen before: LHS gets the existing value number.  */
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      /* First occurrence: value number LHS to itself, then record
	 OP -> LHS in the current table.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_binary_op_insert (op, lhs);
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, tree stmt)
{
  bool changed = false;
  /* Look up the reference under the memory state (vuses) of STMT.  */
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      /* Not seen before: record the load keyed on a private copy of
	 the vuses (the shared vector is reused across lookups).  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, tree stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}

      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	{
	  tree def = DEF_FROM_PTR (var);
	  tree use;

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	    use = def;
	  else
	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
	}
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (tree phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  int i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      /* No equivalent PHI known: record this one and value number the
	 result to itself.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      /* Only substitute operands whose value number is known
	 (i.e. not VN_TOP).  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (tree stmt, tree rhs)
{
  tree result = NULL_TREE;
  tree op0 = TREE_OPERAND (rhs, 0);
  tree op1 = TREE_OPERAND (rhs, 1);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
	op0 = valueize_expr (VN_INFO (op0)->expr);
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (VN_INFO (op1)->expr);
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == TREE_OPERAND (rhs, 0)
      && op1 == TREE_OPERAND (rhs, 1))
    return NULL_TREE;

  /* Defer overflow warnings: we only want them if the folded result
     is actually usable as a GIMPLE expression.  */
  fold_defer_overflow_warnings ();

  result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);

  fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_expression_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (tree rhs)
{
  tree result = NULL_TREE;
  tree op0 = TREE_OPERAND (rhs, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (VN_INFO (op0)->expr);
  else if (TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR
	   || TREE_CODE (rhs) == REALPART_EXPR
	   || TREE_CODE (rhs) == IMAGPART_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (VN_INFO (op0)->expr);
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == TREE_OPERAND (rhs, 0))
    return rhs;

  result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_expression_p (result))
	return result;
    }

  /* Note: unlike simplify_binary_expression, the fallback here is RHS
     itself rather than NULL_TREE.  */
  return rhs;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (tree stmt, tree rhs)
{
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (is_gimple_min_invariant (SSA_VAL (rhs)))
	return SSA_VAL (rhs);
      else if (VN_INFO (rhs)->has_constants)
	return VN_INFO (rhs)->expr;
    }
  else
    {
      switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
	{
	  /* For references, see if we find a result for the lookup,
	     and use it if we do.  */
	case tcc_declaration:
	  /* Pull out any truly constant values.  */
	  if (TREE_READONLY (rhs)
	      && TREE_STATIC (rhs)
	      && DECL_INITIAL (rhs)
	      && valid_gimple_expression_p (DECL_INITIAL (rhs)))
	    return DECL_INITIAL (rhs);

	  /* Fallthrough.  */
	case tcc_reference:
	  {
	    tree result = vn_reference_lookup (rhs,
					       shared_vuses_from_stmt (stmt));
	    if (result)
	      return result;
	  }
	  /* Fallthrough for some codes.  */
	  if (!(TREE_CODE (rhs) == REALPART_EXPR
		|| TREE_CODE (rhs) == IMAGPART_EXPR))
	    break;
	  /* We could do a little more with unary ops, if they expand
	     into binary ops, but it's debatable whether it is worth it.  */
	case tcc_unary:
	  return simplify_unary_expression (rhs);
	  break;
	case tcc_comparison:
	case tcc_binary:
	  return simplify_binary_expression (stmt, rhs);
	  break;
	default:
	  break;
	}
    }
  return rhs;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  tree stmt = SSA_NAME_DEF_STMT (use);
  stmt_ann_t ann;

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_generic_stmt (dump_file, stmt, 0);
    }

  /* RETURN_EXPR may have an embedded MODIFY_STMT.  */
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
    stmt = TREE_OPERAND (stmt, 0);

  ann = stmt_ann (stmt);

  /* Handle uninitialized uses.  */
  if (IS_EMPTY_STMT (stmt))
    {
      changed = set_ssa_val_to (use, use);
    }
  else
    {
      if (TREE_CODE (stmt) == PHI_NODE)
	{
	  changed = visit_phi (stmt);
	}
      else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	       || (ann && ann->has_volatile_ops)
	       || tree_could_throw_p (stmt))
	{
	  /* Volatile or throwing statements cannot be value numbered.  */
	  changed = defs_to_varying (stmt);
	}
      else
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree simplified;

	  STRIP_USELESS_TYPE_CONVERSION (rhs);

	  /* Shortcut for copies. Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, rhs);
	      goto done;
	    }

	  simplified = try_to_simplify (stmt, rhs);
	  if (simplified && simplified != rhs)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     VN_INFO (lhs)->has_constants);
		  else
		    fprintf (dump_file, "\n");
		}
	    }

	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME
	      && simplified != rhs)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	      rhs = simplified;
	    }
	  else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->has_constants = true;
	      VN_INFO (lhs)->expr = unshare_expr (rhs);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating. They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = lhs;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    {
	      changed = visit_reference_op_store (lhs, rhs, stmt);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (is_gimple_min_invariant (rhs))
		{
		  VN_INFO (lhs)->has_constants = true;
		  VN_INFO (lhs)->expr = rhs;
		  changed = set_ssa_val_to (lhs, rhs);
		}
	      else
		{
		  /* Dispatch on the shape of the (possibly simplified)
		     right-hand side.  */
		  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
		    {
		    case tcc_unary:
		      changed = visit_unary_op (lhs, rhs);
		      break;
		    case tcc_binary:
		      changed = visit_binary_op (lhs, rhs);
		      break;
		      /* If tcc_vl_expr ever encompasses more than
			 CALL_EXPR, this will need to be changed.  */
		    case tcc_vl_exp:
		      if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
			changed = visit_reference_op_load (lhs, rhs, stmt);
		      else
			changed = defs_to_varying (stmt);
		      break;
		    case tcc_declaration:
		    case tcc_reference:
		      changed = visit_reference_op_load (lhs, rhs, stmt);
		      break;
		    case tcc_expression:
		      if (TREE_CODE (rhs) == ADDR_EXPR)
			{
			  changed = visit_unary_op (lhs, rhs);
			  goto done;
			}
		      /* Fallthrough.  */
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.
   Ordering: default definitions (empty defining statements) first,
   then statements without a basic block, then by block RPO number;
   within a block, PHIs sort before ordinary statements, which are
   ordered by their uid.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  tree opstmta = SSA_NAME_DEF_STMT (opa);
  tree opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
    return 0;
  else if (IS_EMPTY_STMT (opstmta))
    return -1;
  else if (IS_EMPTY_STMT (opstmtb))
    return 1;

  bba = bb_for_stmt (opstmta);
  bbb = bb_for_stmt (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
	return 0;
      else if (TREE_CODE (opstmta) == PHI_NODE)
	return -1;
      else if (TREE_CODE (opstmtb) == PHI_NODE)
	return 1;
      return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
1798 /* Sort an array containing members of a strongly connected component
1799 SCC so that the members are ordered by RPO number.
1800 This means that when the sort is complete, iterating through the
1801 array will give you the members in RPO order. */
1803 static void
1804 sort_scc (VEC (tree, heap) *scc)
1806 qsort (VEC_address (tree, scc),
1807 VEC_length (tree, scc),
1808 sizeof (tree),
1809 compare_ops);
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* Clear the optimistic tables and pools before each pass so
	     stale optimistic results cannot leak between iterations.  */
	  htab_empty (optimistic_info->unary);
	  htab_empty (optimistic_info->binary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  empty_alloc_pool (optimistic_info->unary_op_pool);
	  empty_alloc_pool (optimistic_info->binary_op_pool);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      if (dump_file && (dump_flags & TDF_STATS))
	fprintf (dump_file, "Processing SCC required %d iterations\n",
		 iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}
/* Declare a heap-allocated vector of ssa_op_iter objects; used by
   DFS () to keep its explicit (non-recursive) iterator stack.  */
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.

   This is Tarjan's SCC algorithm made iterative: recursion is
   simulated with the NAMEVEC / ITERVEC stacks and the start_over /
   continue_walking labels.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  tree defstmt, use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!IS_EMPTY_STMT (defstmt))
    {
      /* Push a new iterator.  */
      if (TREE_CODE (defstmt) == PHI_NODE)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    iter.done = true;

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      /* Returned from the simulated recursion on USE:
		 propagate its low-link up to NAME.  */
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
/* Allocate a value number table: hash tables for PHIs, unary ops,
   binary ops and references, plus the allocation pools backing their
   entries.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
  table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  table->unary_op_pool = create_alloc_pool ("VN unary operations",
					    sizeof (struct vn_unary_op_s),
					    30);
  table->binary_op_pool = create_alloc_pool ("VN binary operations",
					     sizeof (struct vn_binary_op_s),
					     30);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}
2035 /* Free a value number table. */
2037 static void
2038 free_vn_table (vn_tables_t table)
2040 htab_delete (table->phis);
2041 htab_delete (table->unary);
2042 htab_delete (table->binary);
2043 htab_delete (table->references);
2044 free_alloc_pool (table->unary_op_pool);
2045 free_alloc_pool (table->binary_op_pool);
2046 free_alloc_pool (table->phis_pool);
2047 free_alloc_pool (table->references_pool);
/* Initialize the SCCVN pass: per-SSA-name info, RPO numbers,
   statement uids, VN_TOP, and the valid/optimistic tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;
  basic_block bb;
  size_t id = 0;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  next_dfs_num = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  free (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = name;
	}
    }

  /* Assign sequential uids to statements; compare_ops relies on them
     for intra-block ordering.  */
  FOR_ALL_BB (bb)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  stmt_ann (stmt)->uid = id++;
	}
    }

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  pre_info = NULL;
}
2114 void
2115 switch_to_PRE_table (void)
2117 pre_info = XCNEW (struct vn_tables_s);
2118 allocate_vn_table (pre_info);
2119 current_info = pre_info;
/* Free all memory allocated by the SCCVN pass: shared lookup vectors,
   RPO numbers, per-SSA-name info, and the value numbering tables.  */

void
free_scc_vn (void)
{
  size_t i;

  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  XDELETE (VN_INFO (name));
	  /* Clear stale value handles so later passes don't see them.  */
	  if (SSA_NAME_VALUE (name) &&
	      TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
	    SSA_NAME_VALUE (name) = NULL;
	}
    }

  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
  /* The PRE table only exists if switch_to_PRE_table was called.  */
  if (pre_info)
    {
      free_vn_table (pre_info);
      XDELETE (pre_info);
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;

  init_scc_vn ();
  current_info = valid_info;

  /* Default definitions of parameters are value numbered to
     themselves up front.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  SSA_VAL (def) = def;
	}
    }

  /* Walk names from high to low; DFS discovers and processes SCCs,
     bailing out when an SCC exceeds the size limit.  */
  for (i = num_ssa_names - 1; i > 0; i--)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name && VN_INFO (name)->visited
	      && (SSA_VAL (name) != name
		  || is_gimple_min_invariant (VN_INFO (name)->expr)))
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      if (is_gimple_min_invariant (VN_INFO (name)->expr))
		print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
	      else
		print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}