Daily bump.
[official-gcc.git] / gcc / tree-ssa-sccvn.c
bloba14c2a749cf0bea147f7fc0687eca24c6821ea56
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t unary;
111 htab_t binary;
112 htab_t phis;
113 htab_t references;
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
118 } *vn_tables_t;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
123 repeatedly. */
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
128 tree type;
129 tree op0;
130 tree op1;
131 hashval_t hashcode;
132 tree result;
133 } *vn_binary_op_t;
134 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
136 /* Unary operations in the hashtable consist of a single operand, an
137 opcode, and a type. Result is the value number of the operation,
138 and hashcode is stored to avoid having to calculate it repeatedly. */
140 typedef struct vn_unary_op_s
142 enum tree_code opcode;
143 tree type;
144 tree op0;
145 hashval_t hashcode;
146 tree result;
147 } *vn_unary_op_t;
148 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
150 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
151 arguments, and the basic block the phi is in. Result is the value
152 number of the operation, and hashcode is stored to avoid having to
153 calculate it repeatedly. Phi nodes not in the same block are never
154 considered equivalent. */
156 typedef struct vn_phi_s
158 VEC (tree, heap) *phiargs;
159 basic_block block;
160 hashval_t hashcode;
161 tree result;
162 } *vn_phi_t;
163 typedef const struct vn_phi_s *const_vn_phi_t;
165 /* Reference operands only exist in reference operations structures.
166 They consist of an opcode, type, and some number of operands. For
167 a given opcode, some, all, or none of the operands may be used.
168 The operands are there to store the information that makes up the
169 portion of the addressing calculation that opcode performs. */
171 typedef struct vn_reference_op_struct
173 enum tree_code opcode;
174 tree type;
175 tree op0;
176 tree op1;
177 tree op2;
178 } vn_reference_op_s;
179 typedef vn_reference_op_s *vn_reference_op_t;
180 typedef const vn_reference_op_s *const_vn_reference_op_t;
182 DEF_VEC_O(vn_reference_op_s);
183 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
185 /* A reference operation in the hashtable is representation as a
186 collection of vuses, representing the memory state at the time of
187 the operation, and a collection of operands that make up the
188 addressing calculation. If two vn_reference_t's have the same set
189 of operands, they access the same memory location. We also store
190 the resulting value number, and the hashcode. The vuses are
191 always stored in order sorted by ssa name version. */
193 typedef struct vn_reference_s
195 VEC (tree, gc) *vuses;
196 VEC (vn_reference_op_s, heap) *operands;
197 hashval_t hashcode;
198 tree result;
199 } *vn_reference_t;
200 typedef const struct vn_reference_s *const_vn_reference_t;
202 /* Valid hashtables storing information we have proven to be
203 correct. */
205 static vn_tables_t valid_info;
207 /* Optimistic hashtables storing information we are making assumptions about
208 during iterations. */
210 static vn_tables_t optimistic_info;
212 /* PRE hashtables storing information about mapping from expressions to
213 value handles. */
215 static vn_tables_t pre_info;
217 /* Pointer to the set of hashtables that is currently being used.
218 Should always point to either the optimistic_info, or the
219 valid_info. */
221 static vn_tables_t current_info;
224 /* Reverse post order index for each basic block. */
226 static int *rpo_numbers;
228 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
230 /* This represents the top of the VN lattice, which is the universal
231 value. */
233 tree VN_TOP;
235 /* Next DFS number and the stack for strongly connected component
236 detection. */
238 static unsigned int next_dfs_num;
239 static VEC (tree, heap) *sccstack;
241 DEF_VEC_P(vn_ssa_aux_t);
242 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
244 /* Table of vn_ssa_aux_t's, one per ssa_name. */
246 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
248 /* Return the value numbering information for a given SSA name. */
250 vn_ssa_aux_t
251 VN_INFO (tree name)
253 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
254 SSA_NAME_VERSION (name));
257 /* Set the value numbering info for a given SSA name to a given
258 value. */
260 static inline void
261 VN_INFO_SET (tree name, vn_ssa_aux_t value)
263 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
264 SSA_NAME_VERSION (name), value);
267 /* Get the value numbering info for a given SSA name, creating it if
268 it does not exist. */
270 vn_ssa_aux_t
271 VN_INFO_GET (tree name)
273 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
274 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
275 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
276 SSA_NAME_VERSION (name) + 1);
277 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
278 SSA_NAME_VERSION (name), newinfo);
279 return newinfo;
283 /* Compare two reference operands P1 and P2 for equality. return true if
284 they are equal, and false otherwise. */
286 static int
287 vn_reference_op_eq (const void *p1, const void *p2)
289 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
290 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
291 return vro1->opcode == vro2->opcode
292 && vro1->type == vro2->type
293 && expressions_equal_p (vro1->op0, vro2->op0)
294 && expressions_equal_p (vro1->op1, vro2->op1)
295 && expressions_equal_p (vro1->op2, vro2->op2);
298 /* Compute the hash for a reference operand VRO1 */
300 static hashval_t
301 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
303 return iterative_hash_expr (vro1->op0, vro1->opcode)
304 + iterative_hash_expr (vro1->op1, vro1->opcode)
305 + iterative_hash_expr (vro1->op2, vro1->opcode);
308 /* Return the hashcode for a given reference operation P1. */
310 static hashval_t
311 vn_reference_hash (const void *p1)
313 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
314 return vr1->hashcode;
317 /* Compute a hash for the reference operation VR1 and return it. */
319 static inline hashval_t
320 vn_reference_compute_hash (const vn_reference_t vr1)
322 hashval_t result = 0;
323 tree v;
324 int i;
325 vn_reference_op_t vro;
327 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
328 result += iterative_hash_expr (v, 0);
329 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
330 result += vn_reference_op_compute_hash (vro);
332 return result;
335 /* Return true if reference operations P1 and P2 are equivalent. This
336 means they have the same set of operands and vuses. */
338 static int
339 vn_reference_eq (const void *p1, const void *p2)
341 tree v;
342 int i;
343 vn_reference_op_t vro;
345 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
346 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
348 if (vr1->vuses == vr2->vuses
349 && vr1->operands == vr2->operands)
350 return true;
352 /* Impossible for them to be equivalent if they have different
353 number of vuses. */
354 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
355 return false;
357 /* We require that address operands be canonicalized in a way that
358 two memory references will have the same operands if they are
359 equivalent. */
360 if (VEC_length (vn_reference_op_s, vr1->operands)
361 != VEC_length (vn_reference_op_s, vr2->operands))
362 return false;
364 /* The memory state is more often different than the address of the
365 store/load, so check it first. */
366 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
368 if (VEC_index (tree, vr2->vuses, i) != v)
369 return false;
372 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
374 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
375 vro))
376 return false;
378 return true;
381 /* Place the vuses from STMT into *result */
383 static inline void
384 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
386 ssa_op_iter iter;
387 tree vuse;
389 if (!stmt)
390 return;
392 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
393 VEC_safe_push (tree, gc, *result, vuse);
395 if (VEC_length (tree, *result) > 1)
396 sort_vuses (*result);
400 /* Copy the VUSE names in STMT into a vector, and return
401 the vector. */
403 VEC (tree, gc) *
404 copy_vuses_from_stmt (tree stmt)
406 VEC (tree, gc) *vuses = NULL;
408 vuses_to_vec (stmt, &vuses);
410 return vuses;
413 /* Place the vdefs from STMT into *result */
415 static inline void
416 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
418 ssa_op_iter iter;
419 tree vdef;
421 if (!stmt)
422 return;
424 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
425 VEC_safe_push (tree, gc, *result, vdef);
427 if (VEC_length (tree, *result) > 1)
428 sort_vuses (*result);
431 /* Copy the names of vdef results in STMT into a vector, and return
432 the vector. */
434 static VEC (tree, gc) *
435 copy_vdefs_from_stmt (tree stmt)
437 VEC (tree, gc) *vdefs = NULL;
439 vdefs_to_vec (stmt, &vdefs);
441 return vdefs;
444 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
445 static VEC (tree, gc) *shared_lookup_vops;
447 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
448 This function will overwrite the current SHARED_LOOKUP_VOPS
449 variable. */
451 VEC (tree, gc) *
452 shared_vuses_from_stmt (tree stmt)
454 VEC_truncate (tree, shared_lookup_vops, 0);
455 vuses_to_vec (stmt, &shared_lookup_vops);
457 return shared_lookup_vops;
460 /* Copy the operations present in load/store/call REF into RESULT, a vector of
461 vn_reference_op_s's. */
463 static void
464 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
466 /* Calls are different from all other reference operations. */
467 if (TREE_CODE (ref) == CALL_EXPR)
469 vn_reference_op_s temp;
470 tree callfn;
471 call_expr_arg_iterator iter;
472 tree callarg;
474 /* Copy the call_expr opcode, type, function being called, and
475 arguments. */
476 memset (&temp, 0, sizeof (temp));
477 temp.type = TREE_TYPE (ref);
478 temp.opcode = CALL_EXPR;
479 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
481 callfn = get_callee_fndecl (ref);
482 if (!callfn)
483 callfn = CALL_EXPR_FN (ref);
484 temp.type = TREE_TYPE (callfn);
485 temp.opcode = TREE_CODE (callfn);
486 temp.op0 = callfn;
487 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
489 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
491 memset (&temp, 0, sizeof (temp));
492 temp.type = TREE_TYPE (callarg);
493 temp.opcode = TREE_CODE (callarg);
494 temp.op0 = callarg;
495 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
497 return;
500 /* For non-calls, store the information that makes up the address. */
502 while (ref)
504 vn_reference_op_s temp;
506 memset (&temp, 0, sizeof (temp));
507 temp.type = TREE_TYPE (ref);
508 temp.opcode = TREE_CODE (ref);
510 switch (temp.opcode)
512 case ALIGN_INDIRECT_REF:
513 case MISALIGNED_INDIRECT_REF:
514 case INDIRECT_REF:
515 /* The only operand is the address, which gets its own
516 vn_reference_op_s structure. */
517 break;
518 case BIT_FIELD_REF:
519 /* Record bits and position. */
520 temp.op0 = TREE_OPERAND (ref, 1);
521 temp.op1 = TREE_OPERAND (ref, 2);
522 break;
523 case COMPONENT_REF:
524 /* Record field as operand. */
525 temp.op0 = TREE_OPERAND (ref, 1);
526 break;
527 case ARRAY_RANGE_REF:
528 case ARRAY_REF:
529 /* Record index as operand. */
530 temp.op0 = TREE_OPERAND (ref, 1);
531 temp.op1 = TREE_OPERAND (ref, 3);
532 break;
533 case STRING_CST:
534 case INTEGER_CST:
535 case COMPLEX_CST:
536 case VECTOR_CST:
537 case REAL_CST:
538 case VALUE_HANDLE:
539 case VAR_DECL:
540 case PARM_DECL:
541 case CONST_DECL:
542 case RESULT_DECL:
543 case SSA_NAME:
544 temp.op0 = ref;
545 break;
546 /* These are only interesting for their operands, their
547 existence, and their type. They will never be the last
548 ref in the chain of references (IE they require an
549 operand), so we don't have to put anything
550 for op* as it will be handled by the iteration */
551 case IMAGPART_EXPR:
552 case REALPART_EXPR:
553 case VIEW_CONVERT_EXPR:
554 case ADDR_EXPR:
555 break;
556 default:
557 gcc_unreachable ();
560 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
562 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
563 ref = TREE_OPERAND (ref, 0);
564 else
565 ref = NULL_TREE;
569 /* Create a vector of vn_reference_op_s structures from REF, a
570 REFERENCE_CLASS_P tree. The vector is not shared. */
572 static VEC(vn_reference_op_s, heap) *
573 create_reference_ops_from_ref (tree ref)
575 VEC (vn_reference_op_s, heap) *result = NULL;
577 copy_reference_ops_from_ref (ref, &result);
578 return result;
581 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
583 /* Create a vector of vn_reference_op_s structures from REF, a
584 REFERENCE_CLASS_P tree. The vector is shared among all callers of
585 this function. */
587 static VEC(vn_reference_op_s, heap) *
588 shared_reference_ops_from_ref (tree ref)
590 if (!ref)
591 return NULL;
592 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
593 copy_reference_ops_from_ref (ref, &shared_lookup_references);
594 return shared_lookup_references;
598 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
599 structures into their value numbers. This is done in-place, and
600 the vector passed in is returned. */
602 static VEC (vn_reference_op_s, heap) *
603 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
605 vn_reference_op_t vro;
606 int i;
608 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
610 if (vro->opcode == SSA_NAME
611 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
612 vro->op0 = SSA_VAL (vro->op0);
615 return orig;
618 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
619 their value numbers. This is done in-place, and the vector passed
620 in is returned. */
622 static VEC (tree, gc) *
623 valueize_vuses (VEC (tree, gc) *orig)
625 bool made_replacement = false;
626 tree vuse;
627 int i;
629 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
631 if (vuse != SSA_VAL (vuse))
633 made_replacement = true;
634 VEC_replace (tree, orig, i, SSA_VAL (vuse));
638 if (made_replacement && VEC_length (tree, orig) > 1)
639 sort_vuses (orig);
641 return orig;
644 /* Lookup OP in the current hash table, and return the resulting
645 value number if it exists in the hash table. Return NULL_TREE if
646 it does not exist in the hash table. */
648 tree
649 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
651 void **slot;
652 struct vn_reference_s vr1;
654 vr1.vuses = valueize_vuses (vuses);
655 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
656 vr1.hashcode = vn_reference_compute_hash (&vr1);
657 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
658 NO_INSERT);
659 if (!slot && current_info == optimistic_info)
660 slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
661 NO_INSERT);
662 if (!slot)
663 return NULL_TREE;
665 return ((vn_reference_t)*slot)->result;
668 /* Insert OP into the current hash table with a value number of
669 RESULT. */
671 void
672 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
674 void **slot;
675 vn_reference_t vr1;
677 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
679 vr1->vuses = valueize_vuses (vuses);
680 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
681 vr1->hashcode = vn_reference_compute_hash (vr1);
682 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
684 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
685 INSERT);
687 /* Because we lookup stores using vuses, and value number failures
688 using the vdefs (see visit_reference_op_store for how and why),
689 it's possible that on failure we may try to insert an already
690 inserted store. This is not wrong, there is no ssa name for a
691 store that we could use as a differentiator anyway. Thus, unlike
692 the other lookup functions, you cannot gcc_assert (!*slot)
693 here. */
696 *slot = vr1;
700 /* Return the stored hashcode for a unary operation. */
702 static hashval_t
703 vn_unary_op_hash (const void *p1)
705 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
706 return vuo1->hashcode;
709 /* Hash a unary operation P1 and return the result. */
711 static inline hashval_t
712 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
714 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
717 /* Return true if P1 and P2, two unary operations, are equivalent. */
719 static int
720 vn_unary_op_eq (const void *p1, const void *p2)
722 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
723 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
724 return vuo1->opcode == vuo2->opcode
725 && vuo1->type == vuo2->type
726 && expressions_equal_p (vuo1->op0, vuo2->op0);
729 /* Lookup OP in the current hash table, and return the resulting
730 value number if it exists in the hash table. Return NULL_TREE if
731 it does not exist in the hash table. */
733 tree
734 vn_unary_op_lookup (tree op)
736 void **slot;
737 struct vn_unary_op_s vuo1;
739 vuo1.opcode = TREE_CODE (op);
740 vuo1.type = TREE_TYPE (op);
741 vuo1.op0 = TREE_OPERAND (op, 0);
743 if (TREE_CODE (vuo1.op0) == SSA_NAME)
744 vuo1.op0 = SSA_VAL (vuo1.op0);
746 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
747 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
748 NO_INSERT);
749 if (!slot && current_info == optimistic_info)
750 slot = htab_find_slot_with_hash (valid_info->unary, &vuo1, vuo1.hashcode,
751 NO_INSERT);
752 if (!slot)
753 return NULL_TREE;
754 return ((vn_unary_op_t)*slot)->result;
757 /* Insert OP into the current hash table with a value number of
758 RESULT. */
760 void
761 vn_unary_op_insert (tree op, tree result)
763 void **slot;
764 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
766 vuo1->opcode = TREE_CODE (op);
767 vuo1->type = TREE_TYPE (op);
768 vuo1->op0 = TREE_OPERAND (op, 0);
769 vuo1->result = result;
771 if (TREE_CODE (vuo1->op0) == SSA_NAME)
772 vuo1->op0 = SSA_VAL (vuo1->op0);
774 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
775 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
776 INSERT);
777 gcc_assert (!*slot);
778 *slot = vuo1;
781 /* Compute and return the hash value for binary operation VBO1. */
783 static inline hashval_t
784 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
786 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
787 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
790 /* Return the computed hashcode for binary operation P1. */
792 static hashval_t
793 vn_binary_op_hash (const void *p1)
795 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
796 return vbo1->hashcode;
799 /* Compare binary operations P1 and P2 and return true if they are
800 equivalent. */
802 static int
803 vn_binary_op_eq (const void *p1, const void *p2)
805 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
806 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
807 return vbo1->opcode == vbo2->opcode
808 && vbo1->type == vbo2->type
809 && expressions_equal_p (vbo1->op0, vbo2->op0)
810 && expressions_equal_p (vbo1->op1, vbo2->op1);
813 /* Lookup OP in the current hash table, and return the resulting
814 value number if it exists in the hash table. Return NULL_TREE if
815 it does not exist in the hash table. */
817 tree
818 vn_binary_op_lookup (tree op)
820 void **slot;
821 struct vn_binary_op_s vbo1;
823 vbo1.opcode = TREE_CODE (op);
824 vbo1.type = TREE_TYPE (op);
825 vbo1.op0 = TREE_OPERAND (op, 0);
826 vbo1.op1 = TREE_OPERAND (op, 1);
828 if (TREE_CODE (vbo1.op0) == SSA_NAME)
829 vbo1.op0 = SSA_VAL (vbo1.op0);
830 if (TREE_CODE (vbo1.op1) == SSA_NAME)
831 vbo1.op1 = SSA_VAL (vbo1.op1);
833 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
834 && commutative_tree_code (vbo1.opcode))
836 tree temp = vbo1.op0;
837 vbo1.op0 = vbo1.op1;
838 vbo1.op1 = temp;
841 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
842 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
843 NO_INSERT);
844 if (!slot && current_info == optimistic_info)
845 slot = htab_find_slot_with_hash (valid_info->binary, &vbo1, vbo1.hashcode,
846 NO_INSERT);
847 if (!slot)
848 return NULL_TREE;
849 return ((vn_binary_op_t)*slot)->result;
852 /* Insert OP into the current hash table with a value number of
853 RESULT. */
855 void
856 vn_binary_op_insert (tree op, tree result)
858 void **slot;
859 vn_binary_op_t vbo1;
860 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
862 vbo1->opcode = TREE_CODE (op);
863 vbo1->type = TREE_TYPE (op);
864 vbo1->op0 = TREE_OPERAND (op, 0);
865 vbo1->op1 = TREE_OPERAND (op, 1);
866 vbo1->result = result;
868 if (TREE_CODE (vbo1->op0) == SSA_NAME)
869 vbo1->op0 = SSA_VAL (vbo1->op0);
870 if (TREE_CODE (vbo1->op1) == SSA_NAME)
871 vbo1->op1 = SSA_VAL (vbo1->op1);
873 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
874 && commutative_tree_code (vbo1->opcode))
876 tree temp = vbo1->op0;
877 vbo1->op0 = vbo1->op1;
878 vbo1->op1 = temp;
880 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
881 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
882 INSERT);
883 gcc_assert (!*slot);
885 *slot = vbo1;
888 /* Compute a hashcode for PHI operation VP1 and return it. */
890 static inline hashval_t
891 vn_phi_compute_hash (vn_phi_t vp1)
893 hashval_t result = 0;
894 int i;
895 tree phi1op;
897 result = vp1->block->index;
899 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
901 if (phi1op == VN_TOP)
902 continue;
903 result += iterative_hash_expr (phi1op, result);
906 return result;
909 /* Return the computed hashcode for phi operation P1. */
911 static hashval_t
912 vn_phi_hash (const void *p1)
914 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
915 return vp1->hashcode;
918 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
920 static int
921 vn_phi_eq (const void *p1, const void *p2)
923 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
924 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
926 if (vp1->block == vp2->block)
928 int i;
929 tree phi1op;
931 /* Any phi in the same block will have it's arguments in the
932 same edge order, because of how we store phi nodes. */
933 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
935 tree phi2op = VEC_index (tree, vp2->phiargs, i);
936 if (phi1op == VN_TOP || phi2op == VN_TOP)
937 continue;
938 if (!expressions_equal_p (phi1op, phi2op))
939 return false;
941 return true;
943 return false;
946 static VEC(tree, heap) *shared_lookup_phiargs;
948 /* Lookup PHI in the current hash table, and return the resulting
949 value number if it exists in the hash table. Return NULL_TREE if
950 it does not exist in the hash table. */
952 static tree
953 vn_phi_lookup (tree phi)
955 void **slot;
956 struct vn_phi_s vp1;
957 int i;
959 VEC_truncate (tree, shared_lookup_phiargs, 0);
961 /* Canonicalize the SSA_NAME's to their value number. */
962 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
964 tree def = PHI_ARG_DEF (phi, i);
965 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
966 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
968 vp1.phiargs = shared_lookup_phiargs;
969 vp1.block = bb_for_stmt (phi);
970 vp1.hashcode = vn_phi_compute_hash (&vp1);
971 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
972 NO_INSERT);
973 if (!slot && current_info == optimistic_info)
974 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
975 NO_INSERT);
976 if (!slot)
977 return NULL_TREE;
978 return ((vn_phi_t)*slot)->result;
981 /* Insert PHI into the current hash table with a value number of
982 RESULT. */
984 static void
985 vn_phi_insert (tree phi, tree result)
987 void **slot;
988 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
989 int i;
990 VEC (tree, heap) *args = NULL;
992 /* Canonicalize the SSA_NAME's to their value number. */
993 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
995 tree def = PHI_ARG_DEF (phi, i);
996 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
997 VEC_safe_push (tree, heap, args, def);
999 vp1->phiargs = args;
1000 vp1->block = bb_for_stmt (phi);
1001 vp1->result = result;
1002 vp1->hashcode = vn_phi_compute_hash (vp1);
1004 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1005 INSERT);
1007 /* Because we iterate over phi operations more than once, it's
1008 possible the slot might already exist here, hence no assert.*/
1009 *slot = vp1;
1013 /* Print set of components in strongly connected component SCC to OUT. */
1015 static void
1016 print_scc (FILE *out, VEC (tree, heap) *scc)
1018 tree var;
1019 unsigned int i;
1021 fprintf (out, "SCC consists of: ");
1022 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1024 print_generic_expr (out, var, 0);
1025 fprintf (out, " ");
1027 fprintf (out, "\n");
1030 /* Set the value number of FROM to TO, return true if it has changed
1031 as a result. */
1033 static inline bool
1034 set_ssa_val_to (tree from, tree to)
1036 tree currval;
1038 if (from != to
1039 && TREE_CODE (to) == SSA_NAME
1040 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1041 to = from;
1043 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1044 and invariants. So assert that here. */
1045 gcc_assert (to != NULL_TREE
1046 && (to == VN_TOP
1047 || TREE_CODE (to) == SSA_NAME
1048 || is_gimple_min_invariant (to)));
1050 if (dump_file && (dump_flags & TDF_DETAILS))
1052 fprintf (dump_file, "Setting value number of ");
1053 print_generic_expr (dump_file, from, 0);
1054 fprintf (dump_file, " to ");
1055 print_generic_expr (dump_file, to, 0);
1056 fprintf (dump_file, "\n");
1059 currval = SSA_VAL (from);
1061 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1063 SSA_VAL (from) = to;
1064 return true;
1066 return false;
1069 /* Set all definitions in STMT to value number to themselves.
1070 Return true if a value number changed. */
1072 static bool
1073 defs_to_varying (tree stmt)
1075 bool changed = false;
1076 ssa_op_iter iter;
1077 def_operand_p defp;
1079 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1081 tree def = DEF_FROM_PTR (defp);
1083 VN_INFO (def)->use_processed = true;
1084 changed |= set_ssa_val_to (def, def);
1086 return changed;
1089 /* Visit a copy between LHS and RHS, return true if the value number
1090 changed. */
1092 static bool
1093 visit_copy (tree lhs, tree rhs)
1096 /* Follow chains of copies to their destination. */
1097 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1098 rhs = SSA_VAL (rhs);
1100 /* The copy may have a more interesting constant filled expression
1101 (we don't, since we know our RHS is just an SSA name). */
1102 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1103 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1105 return set_ssa_val_to (lhs, rhs);
1108 /* Visit a unary operator RHS, value number it, and return true if the
1109 value number of LHS has changed as a result. */
1111 static bool
1112 visit_unary_op (tree lhs, tree op)
1114 bool changed = false;
1115 tree result = vn_unary_op_lookup (op);
1117 if (result)
1119 changed = set_ssa_val_to (lhs, result);
1121 else
1123 changed = set_ssa_val_to (lhs, lhs);
1124 vn_unary_op_insert (op, lhs);
1127 return changed;
1130 /* Visit a binary operator RHS, value number it, and return true if the
1131 value number of LHS has changed as a result. */
1133 static bool
1134 visit_binary_op (tree lhs, tree op)
1136 bool changed = false;
1137 tree result = vn_binary_op_lookup (op);
1139 if (result)
1141 changed = set_ssa_val_to (lhs, result);
1143 else
1145 changed = set_ssa_val_to (lhs, lhs);
1146 vn_binary_op_insert (op, lhs);
1149 return changed;
1152 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1153 and return true if the value number of the LHS has changed as a result. */
1155 static bool
1156 visit_reference_op_load (tree lhs, tree op, tree stmt)
1158 bool changed = false;
1159 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1161 if (result)
1163 changed = set_ssa_val_to (lhs, result);
1165 else
1167 changed = set_ssa_val_to (lhs, lhs);
1168 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1171 return changed;
1175 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1176    and return true if the value number of the LHS has changed as a result. */
1178 static bool
1179 visit_reference_op_store (tree lhs, tree op, tree stmt)
1181   bool changed = false;
1182   tree result;
1183   bool resultsame = false;
1185   /* First we want to lookup using the *vuses* from the store and see
1186      if there the last store to this location with the same address
1187      had the same value.
1189      The vuses represent the memory state before the store.  If the
1190      memory state, address, and value of the store is the same as the
1191      last store to this location, then this store will produce the
1192      same memory state as that store.
1194      In this case the vdef versions for this store are value numbered to those
1195      vuse versions, since they represent the same memory state after
1196      this store.
1198      Otherwise, the vdefs for the store are used when inserting into
1199      the table, since the store generates a new memory state.  */
1201   result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1203   if (result)
1205       if (TREE_CODE (result) == SSA_NAME)
1206 	result = SSA_VAL (result);
1207       resultsame = expressions_equal_p (result, op);
  /* If the lookup failed, or a different value was stored last time,
     this store creates a new memory state; otherwise the vdefs simply
     inherit the value numbers of the corresponding vuses.  */
1210   if (!result || !resultsame)
1212       VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1213       int i;
1214       tree vdef;
1216       if (dump_file && (dump_flags & TDF_DETAILS))
1218 	  fprintf (dump_file, "No store match\n");
1219 	  fprintf (dump_file, "Value numbering store ");
1220 	  print_generic_expr (dump_file, lhs, 0);
1221 	  fprintf (dump_file, " to ");
1222 	  print_generic_expr (dump_file, op, 0);
1223 	  fprintf (dump_file, "\n");
1225       /* Have to set value numbers before insert, since insert is
1226 	 going to valueize the references in-place.  */
1227       for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1229 	  VN_INFO (vdef)->use_processed = true;
1230 	  changed |= set_ssa_val_to (vdef, vdef);
1233       vn_reference_insert (lhs, op, vdefs);
1235   else
1237       /* We had a match, so value number the vdefs to have the value
1238 	 number of the vuses they came from.  */
1239       ssa_op_iter op_iter;
1240       def_operand_p var;
1241       vuse_vec_p vv;
1243       if (dump_file && (dump_flags & TDF_DETAILS))
1244 	fprintf (dump_file, "Store matched earlier value,"
1245 		 "value numbering store vdefs to matching vuses.\n");
1247       FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1249 	  tree def = DEF_FROM_PTR (var);
1250 	  tree use;
1252 	  /* Uh, if the vuse is a multiuse, we can't really do much
1253 	     here, sadly, since we don't know which value number of
1254 	     which vuse to use.  */
1255 	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1256 	    use = def;
1257 	  else
1258 	    use = VUSE_ELEMENT_VAR (*vv, 0);
1260 	  VN_INFO (def)->use_processed = true;
1261 	  changed |= set_ssa_val_to (def, SSA_VAL (use));
1265   return changed;
1268 /* Visit and value number PHI, return true if the value number
1269 changed. */
1271 static bool
1272 visit_phi (tree phi)
1274 bool changed = false;
1275 tree result;
1276 tree sameval = VN_TOP;
1277 bool allsame = true;
1278 int i;
1280 /* TODO: We could check for this in init_sccvn, and replace this
1281 with a gcc_assert. */
1282 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1283 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1285 /* See if all non-TOP arguments have the same value. TOP is
1286 equivalent to everything, so we can ignore it. */
1287 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1289 tree def = PHI_ARG_DEF (phi, i);
1291 if (TREE_CODE (def) == SSA_NAME)
1292 def = SSA_VAL (def);
1293 if (def == VN_TOP)
1294 continue;
1295 if (sameval == VN_TOP)
1297 sameval = def;
1299 else
1301 if (!expressions_equal_p (def, sameval))
1303 allsame = false;
1304 break;
1309 /* If all value numbered to the same value, the phi node has that
1310 value. */
1311 if (allsame)
1313 if (is_gimple_min_invariant (sameval))
1315 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1316 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1318 else
1320 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1321 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1324 if (TREE_CODE (sameval) == SSA_NAME)
1325 return visit_copy (PHI_RESULT (phi), sameval);
1327 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1330 /* Otherwise, see if it is equivalent to a phi node in this block. */
1331 result = vn_phi_lookup (phi);
1332 if (result)
1334 if (TREE_CODE (result) == SSA_NAME)
1335 changed = visit_copy (PHI_RESULT (phi), result);
1336 else
1337 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1339 else
1341 vn_phi_insert (phi, PHI_RESULT (phi));
1342 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1343 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1344 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1347 return changed;
1350 /* Return true if EXPR contains constants. */
1352 static bool
1353 expr_has_constants (tree expr)
1355 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1357 case tcc_unary:
1358 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1360 case tcc_binary:
1361 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1362 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1363 /* Constants inside reference ops are rarely interesting, but
1364 it can take a lot of looking to find them. */
1365 case tcc_reference:
1366 case tcc_declaration:
1367 return false;
1368 default:
1369 return is_gimple_min_invariant (expr);
1371 return false;
1374 /* Replace SSA_NAMES in expr with their value numbers, and return the
1375 result.
1376 This is performed in place. */
1378 static tree
1379 valueize_expr (tree expr)
1381 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1383 case tcc_unary:
1384 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1385 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1386 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1387 break;
1388 case tcc_binary:
1389 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1390 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1391 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1392 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1393 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1394 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1395 break;
1396 default:
1397 break;
1399 return expr;
1402 /* Simplify the binary expression RHS, and return the result if
1403 simplified. */
1405 static tree
1406 simplify_binary_expression (tree stmt, tree rhs)
1408 tree result = NULL_TREE;
1409 tree op0 = TREE_OPERAND (rhs, 0);
1410 tree op1 = TREE_OPERAND (rhs, 1);
1412 /* This will not catch every single case we could combine, but will
1413 catch those with constants. The goal here is to simultaneously
1414 combine constants between expressions, but avoid infinite
1415 expansion of expressions during simplification. */
1416 if (TREE_CODE (op0) == SSA_NAME)
1418 if (VN_INFO (op0)->has_constants)
1419 op0 = valueize_expr (VN_INFO (op0)->expr);
1420 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1421 op0 = SSA_VAL (op0);
1424 if (TREE_CODE (op1) == SSA_NAME)
1426 if (VN_INFO (op1)->has_constants)
1427 op1 = valueize_expr (VN_INFO (op1)->expr);
1428 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1429 op1 = SSA_VAL (op1);
1432 /* Avoid folding if nothing changed. */
1433 if (op0 == TREE_OPERAND (rhs, 0)
1434 && op1 == TREE_OPERAND (rhs, 1))
1435 return NULL_TREE;
1437 fold_defer_overflow_warnings ();
1439 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1441 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1442 stmt, 0);
1444 /* Make sure result is not a complex expression consisting
1445 of operators of operators (IE (a + b) + (a + c))
1446 Otherwise, we will end up with unbounded expressions if
1447 fold does anything at all. */
1448 if (result && valid_gimple_expression_p (result))
1449 return result;
1451 return NULL_TREE;
1454 /* Simplify the unary expression RHS, and return the result if
1455 simplified. */
1457 static tree
1458 simplify_unary_expression (tree rhs)
1460 tree result = NULL_TREE;
1461 tree op0 = TREE_OPERAND (rhs, 0);
1463 if (TREE_CODE (op0) != SSA_NAME)
1464 return NULL_TREE;
1466 if (VN_INFO (op0)->has_constants)
1467 op0 = valueize_expr (VN_INFO (op0)->expr);
1468 else if (TREE_CODE (rhs) == NOP_EXPR
1469 || TREE_CODE (rhs) == CONVERT_EXPR
1470 || TREE_CODE (rhs) == REALPART_EXPR
1471 || TREE_CODE (rhs) == IMAGPART_EXPR)
1473 /* We want to do tree-combining on conversion-like expressions.
1474 Make sure we feed only SSA_NAMEs or constants to fold though. */
1475 tree tem = valueize_expr (VN_INFO (op0)->expr);
1476 if (UNARY_CLASS_P (tem)
1477 || BINARY_CLASS_P (tem)
1478 || TREE_CODE (tem) == SSA_NAME
1479 || is_gimple_min_invariant (tem))
1480 op0 = tem;
1483 /* Avoid folding if nothing changed, but remember the expression. */
1484 if (op0 == TREE_OPERAND (rhs, 0))
1485 return rhs;
1487 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1488 if (result)
1490 STRIP_USELESS_TYPE_CONVERSION (result);
1491 if (valid_gimple_expression_p (result))
1492 return result;
1495 return rhs;
1498 /* Try to simplify RHS using equivalences and constant folding. */
1500 static tree
1501 try_to_simplify (tree stmt, tree rhs)
  /* An SSA name RHS: prefer a known constant value number, then any
     recorded constant-bearing expression.  */
1503   if (TREE_CODE (rhs) == SSA_NAME)
1505       if (is_gimple_min_invariant (SSA_VAL (rhs)))
1506 	return SSA_VAL (rhs);
1507       else if (VN_INFO (rhs)->has_constants)
1508 	return VN_INFO (rhs)->expr;
1510   else
  /* NOTE: this switch deliberately falls through twice:
     tcc_declaration falls into tcc_reference, and REALPART/IMAGPART
     references fall into tcc_unary.  */
1512       switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1514 	  /* For references, see if we find a result for the lookup,
1515 	     and use it if we do. */
1516 	case tcc_declaration:
1517 	  /* Pull out any truly constant values. */
1518 	  if (TREE_READONLY (rhs)
1519 	      && TREE_STATIC (rhs)
1520 	      && DECL_INITIAL (rhs)
1521 	      && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1522 	    return DECL_INITIAL (rhs);
1524 	  /* Fallthrough. */
1525 	case tcc_reference:
1527 	    tree result = vn_reference_lookup (rhs,
1528 					       shared_vuses_from_stmt (stmt));
1529 	    if (result)
1530 	      return result;
1532 	  /* Fallthrough for some codes. */
1533 	  if (!(TREE_CODE (rhs) == REALPART_EXPR
1534 		|| TREE_CODE (rhs) == IMAGPART_EXPR))
1535 	    break;
1536 	  /* We could do a little more with unary ops, if they expand
1537 	     into binary ops, but it's debatable whether it is worth it. */
1538 	case tcc_unary:
1539 	  return simplify_unary_expression (rhs);
1540 	  break;
1541 	case tcc_comparison:
1542 	case tcc_binary:
1543 	  return simplify_binary_expression (stmt, rhs);
1544 	  break;
1545 	default:
1546 	  break;
  /* Nothing simplified: hand back the original expression.  */
1549   return rhs;
1552 /* Visit and value number USE, return true if the value number
1553    changed. */
1555 static bool
1556 visit_use (tree use)
1558   bool changed = false;
1559   tree stmt = SSA_NAME_DEF_STMT (use);
1560   stmt_ann_t ann;
1562   VN_INFO (use)->use_processed = true;
1564   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1565   if (dump_file && (dump_flags & TDF_DETAILS))
1567       fprintf (dump_file, "Value numbering ");
1568       print_generic_expr (dump_file, use, 0);
1569       fprintf (dump_file, " stmt = ");
1570       print_generic_stmt (dump_file, stmt, 0);
1573   /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1574   if (TREE_CODE (stmt) == RETURN_EXPR
1575       && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1576     stmt = TREE_OPERAND (stmt, 0);
1578   ann = stmt_ann (stmt);
1580   /* Handle uninitialized uses. */
1581   if (IS_EMPTY_STMT (stmt))
1583       changed = set_ssa_val_to (use, use);
1585   else
1587       if (TREE_CODE (stmt) == PHI_NODE)
1589 	  changed = visit_phi (stmt);
  /* Non-assignments and statements with volatile operands are varying.  */
1591       else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1592 	       || (ann && ann->has_volatile_ops))
1594 	  changed = defs_to_varying (stmt);
1596       else
1598 	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1599 	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1600 	  tree simplified;
1602 	  STRIP_USELESS_TYPE_CONVERSION (rhs);
1604 	  /* Shortcut for copies. Simplifying copies is pointless,
1605 	     since we copy the expression and value they represent. */
1606 	  if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1608 	      changed = visit_copy (lhs, rhs);
1609 	      goto done;
  /* Try constant folding / simplification using the current value
     numbers before visiting the RHS proper.  */
1611 	  simplified = try_to_simplify (stmt, rhs);
1612 	  if (simplified && simplified != rhs)
1614 	      if (dump_file && (dump_flags & TDF_DETAILS))
1616 		  fprintf (dump_file, "RHS ");
1617 		  print_generic_expr (dump_file, rhs, 0);
1618 		  fprintf (dump_file, " simplified to ");
1619 		  print_generic_expr (dump_file, simplified, 0);
1620 		  if (TREE_CODE (lhs) == SSA_NAME)
1621 		    fprintf (dump_file, " has constants %d\n",
1622 			     VN_INFO (lhs)->has_constants);
1623 		  else
1624 		    fprintf (dump_file, "\n");
1628 	  /* Setting value numbers to constants will occasionally
1629 	     screw up phi congruence because constants are not
1630 	     uniquely associated with a single ssa name that can be
1631 	     looked up. */
1632 	  if (simplified && is_gimple_min_invariant (simplified)
1633 	      && TREE_CODE (lhs) == SSA_NAME
1634 	      && simplified != rhs)
1636 	      VN_INFO (lhs)->expr = simplified;
1637 	      VN_INFO (lhs)->has_constants = true;
1638 	      changed = set_ssa_val_to (lhs, simplified);
1639 	      goto done;
1641 	  else if (simplified && TREE_CODE (simplified) == SSA_NAME
1642 		   && TREE_CODE (lhs) == SSA_NAME)
1644 	      changed = visit_copy (lhs, simplified);
1645 	      goto done;
1647 	  else if (simplified)
1649 	      if (TREE_CODE (lhs) == SSA_NAME)
1651 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1652 		  /* We have to unshare the expression or else
1653 		     valuizing may change the IL stream. */
1654 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
1656 	      rhs = simplified;
1658 	  else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1660 	      VN_INFO (lhs)->has_constants = true;
1661 	      VN_INFO (lhs)->expr = unshare_expr (rhs);
1663 	  else if (TREE_CODE (lhs) == SSA_NAME)
1665 	      /* We reset expr and constantness here because we may
1666 		 have been value numbering optimistically, and
1667 		 iterating. They may become non-constant in this case,
1668 		 even if they were optimistically constant. */
1670 	      VN_INFO (lhs)->has_constants = false;
1671 	      VN_INFO (lhs)->expr = lhs;
  /* Dispatch on the form of the LHS and the class of the RHS to the
     appropriate visitor.  */
1674 	  if (TREE_CODE (lhs) == SSA_NAME
1675 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1676 	    changed = defs_to_varying (stmt);
1677 	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1679 	      changed = visit_reference_op_store (lhs, rhs, stmt);
1681 	  else if (TREE_CODE (lhs) == SSA_NAME)
1683 	      if (is_gimple_min_invariant (rhs))
1685 		  VN_INFO (lhs)->has_constants = true;
1686 		  VN_INFO (lhs)->expr = rhs;
1687 		  changed = set_ssa_val_to (lhs, rhs);
1689 	      else
1691 		  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1693 		    case tcc_unary:
1694 		      changed = visit_unary_op (lhs, rhs);
1695 		      break;
1696 		    case tcc_binary:
1697 		      changed = visit_binary_op (lhs, rhs);
1698 		      break;
1699 		      /* If tcc_vl_expr ever encompasses more than
1700 			 CALL_EXPR, this will need to be changed. */
1701 		    case tcc_vl_exp:
1702 		      if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1703 			changed = visit_reference_op_load (lhs, rhs, stmt);
1704 		      else
1705 			changed = defs_to_varying (stmt);
1706 		      break;
1707 		    case tcc_declaration:
1708 		    case tcc_reference:
1709 		      changed = visit_reference_op_load (lhs, rhs, stmt);
1710 		      break;
1711 		    case tcc_expression:
1712 		      if (TREE_CODE (rhs) == ADDR_EXPR)
1714 			  changed = visit_unary_op (lhs, rhs);
1715 			  goto done;
1717 		      /* Fallthrough. */
1718 		    default:
1719 		      changed = defs_to_varying (stmt);
1720 		      break;
1724 	  else
1725 	    changed = defs_to_varying (stmt);
1728  done:
1729   return changed;
1732 /* Compare two operands by reverse postorder index */
1734 static int
1735 compare_ops (const void *pa, const void *pb)
1737 const tree opa = *((const tree *)pa);
1738 const tree opb = *((const tree *)pb);
1739 tree opstmta = SSA_NAME_DEF_STMT (opa);
1740 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1741 basic_block bba;
1742 basic_block bbb;
1744 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1745 return 0;
1746 else if (IS_EMPTY_STMT (opstmta))
1747 return -1;
1748 else if (IS_EMPTY_STMT (opstmtb))
1749 return 1;
1751 bba = bb_for_stmt (opstmta);
1752 bbb = bb_for_stmt (opstmtb);
1754 if (!bba && !bbb)
1755 return 0;
1756 else if (!bba)
1757 return -1;
1758 else if (!bbb)
1759 return 1;
1761 if (bba == bbb)
1763 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1764 return 0;
1765 else if (TREE_CODE (opstmta) == PHI_NODE)
1766 return -1;
1767 else if (TREE_CODE (opstmtb) == PHI_NODE)
1768 return 1;
1769 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1771 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1774 /* Sort an array containing members of a strongly connected component
1775 SCC so that the members are ordered by RPO number.
1776 This means that when the sort is complete, iterating through the
1777 array will give you the members in RPO order. */
1779 static void
1780 sort_scc (VEC (tree, heap) *scc)
1782 qsort (VEC_address (tree, scc),
1783 VEC_length (tree, scc),
1784 sizeof (tree),
1785 compare_ops);
1788 /* Process a strongly connected component in the SSA graph. */
1790 static void
1791 process_scc (VEC (tree, heap) *scc)
1793 /* If the SCC has a single member, just visit it. */
1795 if (VEC_length (tree, scc) == 1)
1797 tree use = VEC_index (tree, scc, 0);
1798 if (!VN_INFO (use)->use_processed)
1799 visit_use (use);
1801 else
1803 tree var;
1804 unsigned int i;
1805 unsigned int iterations = 0;
1806 bool changed = true;
1808 /* Iterate over the SCC with the optimistic table until it stops
1809 changing. */
1810 current_info = optimistic_info;
1811 while (changed)
1813 changed = false;
1814 iterations++;
1815 htab_empty (optimistic_info->unary);
1816 htab_empty (optimistic_info->binary);
1817 htab_empty (optimistic_info->phis);
1818 htab_empty (optimistic_info->references);
1819 empty_alloc_pool (optimistic_info->unary_op_pool);
1820 empty_alloc_pool (optimistic_info->binary_op_pool);
1821 empty_alloc_pool (optimistic_info->phis_pool);
1822 empty_alloc_pool (optimistic_info->references_pool);
1823 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1824 changed |= visit_use (var);
1827 if (dump_file && (dump_flags & TDF_STATS))
1828 fprintf (dump_file, "Processing SCC required %d iterations\n",
1829 iterations);
1831 /* Finally, visit the SCC once using the valid table. */
1832 current_info = valid_info;
1833 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1834 visit_use (var);
1838 /* Depth first search on NAME to discover and process SCC's in the SSA
1839    graph.
1840    Execution of this algorithm relies on the fact that the SCC's are
1841    popped off the stack in topological order.
1842    Returns true if successful, false if we stopped processing SCC's due
1843    to ressource constraints. */
1845 static bool
1846 DFS (tree name)
1848   ssa_op_iter iter;
1849   use_operand_p usep;
1850   tree defstmt;
1852   /* SCC info */
  /* Standard SCC bookkeeping: discovery number, low-link, and the
     shared SCC stack.  */
1853   VN_INFO (name)->dfsnum = next_dfs_num++;
1854   VN_INFO (name)->visited = true;
1855   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1857   VEC_safe_push (tree, heap, sccstack, name);
1858   VN_INFO (name)->on_sccstack = true;
1859   defstmt = SSA_NAME_DEF_STMT (name);
1861   /* Recursively DFS on our operands, looking for SCC's. */
1862   if (!IS_EMPTY_STMT (defstmt))
1864       FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1865 				SSA_OP_ALL_USES)
1867 	  tree use = USE_FROM_PTR (usep);
1869 	  /* Since we handle phi nodes, we will sometimes get
1870 	     invariants in the use expression. */
1871 	  if (TREE_CODE (use) != SSA_NAME)
1872 	    continue;
1874 	  if (! (VN_INFO (use)->visited))
1876 	      if (!DFS (use))
1877 		return false;
1878 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1879 					 VN_INFO (use)->low);
1881 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1882 	      && VN_INFO (use)->on_sccstack)
1884 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1885 					 VN_INFO (name)->low);
1890   /* See if we found an SCC. */
  /* NAME is an SCC root exactly when its low-link equals its own
     discovery number.  */
1891   if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1893       VEC (tree, heap) *scc = NULL;
1894       tree x;
1896       /* Found an SCC, pop the components off the SCC stack and
1897 	 process them. */
1900 	  x = VEC_pop (tree, sccstack);
1902 	  VN_INFO (x)->on_sccstack = false;
1903 	  VEC_safe_push (tree, heap, scc, x);
1904 	} while (x != name);
1906       /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
1907       if (VEC_length (tree, scc)
1908 	  > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
1910 	  if (dump_file)
1911 	    fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
1912 		     "SCC size %u exceeding %u\n", VEC_length (tree, scc),
1913 		     (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
1914 	  return false;
1917       if (VEC_length (tree, scc) > 1)
1918 	sort_scc (scc);
1920       if (dump_file && (dump_flags & TDF_DETAILS))
1921 	print_scc (dump_file, scc);
1923       process_scc (scc);
1925       VEC_free (tree, heap, scc);
1928   return true;
1931 static void
1932 free_phi (void *vp)
1934 vn_phi_t phi = vp;
1935 VEC_free (tree, heap, phi->phiargs);
1939 /* Free a reference operation structure VP. */
1941 static void
1942 free_reference (void *vp)
1944 vn_reference_t vr = vp;
1945 VEC_free (vn_reference_op_s, heap, vr->operands);
1948 /* Allocate a value number table. */
1950 static void
1951 allocate_vn_table (vn_tables_t table)
1953 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1954 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1955 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1956 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1957 free_reference);
1959 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1960 sizeof (struct vn_unary_op_s),
1961 30);
1962 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1963 sizeof (struct vn_binary_op_s),
1964 30);
1965 table->phis_pool = create_alloc_pool ("VN phis",
1966 sizeof (struct vn_phi_s),
1967 30);
1968 table->references_pool = create_alloc_pool ("VN references",
1969 sizeof (struct vn_reference_s),
1970 30);
1973 /* Free a value number table. */
1975 static void
1976 free_vn_table (vn_tables_t table)
1978 htab_delete (table->phis);
1979 htab_delete (table->unary);
1980 htab_delete (table->binary);
1981 htab_delete (table->references);
1982 free_alloc_pool (table->unary_op_pool);
1983 free_alloc_pool (table->binary_op_pool);
1984 free_alloc_pool (table->phis_pool);
1985 free_alloc_pool (table->references_pool);
/* Initialize the SCCVN datastructures: per-SSA-name auxiliary info,
   RPO numbers for basic blocks, statement uids, and the valid and
   optimistic value tables.  */

1988 static void
1989 init_scc_vn (void)
1991   size_t i;
1992   int j;
1993   int *rpo_numbers_temp;
1994   basic_block bb;
1995   size_t id = 0;
1997   calculate_dominance_info (CDI_DOMINATORS);
1998   sccstack = NULL;
1999   next_dfs_num = 1;
2001   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2002   /* VEC_alloc doesn't actually grow it to the right size, it just
2003      preallocates the space to do so. */
2004   VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2005   shared_lookup_phiargs = NULL;
2006   shared_lookup_vops = NULL;
2007   shared_lookup_references = NULL;
2008   rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2009   rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2010   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2012   /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2013      the i'th block in RPO order is bb. We want to map bb's to RPO
2014      numbers, so we need to rearrange this array. */
2015   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2016     rpo_numbers[rpo_numbers_temp[j]] = j;
2018   free (rpo_numbers_temp);
2020   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2022   /* Create the VN_INFO structures, and initialize value numbers to
2023      TOP. */
2024   for (i = 0; i < num_ssa_names; i++)
2026       tree name = ssa_name (i);
2027       if (name)
2029 	  VN_INFO_GET (name)->valnum = VN_TOP;
2030 	  VN_INFO (name)->expr = name;
  /* Give every statement a sequential uid; compare_ops uses these to
     order statements within a basic block.  */
2034   FOR_ALL_BB (bb)
2036       block_stmt_iterator bsi;
2037       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2039 	  tree stmt = bsi_stmt (bsi);
2040 	  stmt_ann (stmt)->uid = id++;
2044   /* Create the valid and optimistic value numbering tables. */
2045   valid_info = XCNEW (struct vn_tables_s);
2046   allocate_vn_table (valid_info);
2047   optimistic_info = XCNEW (struct vn_tables_s);
2048   allocate_vn_table (optimistic_info);
2049   pre_info = NULL;
2052 void
2053 switch_to_PRE_table (void)
2055 pre_info = XCNEW (struct vn_tables_s);
2056 allocate_vn_table (pre_info);
2057 current_info = pre_info;
2060 void
2061 free_scc_vn (void)
2063 size_t i;
2065 VEC_free (tree, heap, shared_lookup_phiargs);
2066 VEC_free (tree, gc, shared_lookup_vops);
2067 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2068 XDELETEVEC (rpo_numbers);
2069 for (i = 0; i < num_ssa_names; i++)
2071 tree name = ssa_name (i);
2072 if (name)
2074 XDELETE (VN_INFO (name));
2075 if (SSA_NAME_VALUE (name) &&
2076 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2077 SSA_NAME_VALUE (name) = NULL;
2081 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2082 VEC_free (tree, heap, sccstack);
2083 free_vn_table (valid_info);
2084 XDELETE (valid_info);
2085 free_vn_table (optimistic_info);
2086 XDELETE (optimistic_info);
2087 if (pre_info)
2089 free_vn_table (pre_info);
2090 XDELETE (pre_info);
2094 /* Do SCCVN. Returns true if it finished, false if we bailed out
2095    due to ressource constraints. */
2097 bool
2098 run_scc_vn (void)
2100   size_t i;
2101   tree param;
2103   init_scc_vn ();
2104   current_info = valid_info;
  /* Default definitions of parameters get themselves as value
     number -- they are not defined by any visible statement.  */
2106   for (param = DECL_ARGUMENTS (current_function_decl);
2107        param;
2108        param = TREE_CHAIN (param))
2110       if (gimple_default_def (cfun, param) != NULL)
2112 	  tree def = gimple_default_def (cfun, param);
2113 	  SSA_VAL (def) = def;
  /* Walk the SSA names from the back and DFS from each unvisited,
     used name; DFS pops and processes SCCs as it finds them, and
     reports failure when an SCC exceeds the size limit.  */
2117   for (i = num_ssa_names - 1; i > 0; i--)
2119       tree name = ssa_name (i);
2120       if (name
2121 	  && VN_INFO (name)->visited == false
2122 	  && !has_zero_uses (name))
2123 	if (!DFS (name))
2125 	    free_scc_vn ();
2126 	    return false;
2130   if (dump_file && (dump_flags & TDF_DETAILS))
2132       fprintf (dump_file, "Value numbers:\n");
2133       for (i = 0; i < num_ssa_names; i++)
2135 	  tree name = ssa_name (i);
2136 	  if (name && VN_INFO (name)->visited
2137 	      && (SSA_VAL (name) != name
2138 		  || is_gimple_min_invariant (VN_INFO (name)->expr)))
2140 	      print_generic_expr (dump_file, name, 0);
2141 	      fprintf (dump_file, " = ");
2142 	      if (is_gimple_min_invariant (VN_INFO (name)->expr))
2143 		print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2144 	      else
2145 		print_generic_expr (dump_file, SSA_VAL (name), 0);
2146 	      fprintf (dump_file, "\n");
2151   return true;