PR c++/34824
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob8380ebb35f2aef937d30e668c97698b396d0415a
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t unary;
111 htab_t binary;
112 htab_t phis;
113 htab_t references;
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
118 } *vn_tables_t;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
123 repeatedly. */
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
128 hashval_t hashcode;
129 tree type;
130 tree op0;
131 tree op1;
132 tree result;
133 } *vn_binary_op_t;
134 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
136 /* Unary operations in the hashtable consist of a single operand, an
137 opcode, and a type. Result is the value number of the operation,
138 and hashcode is stored to avoid having to calculate it repeatedly. */
140 typedef struct vn_unary_op_s
142 enum tree_code opcode;
143 hashval_t hashcode;
144 tree type;
145 tree op0;
146 tree result;
147 } *vn_unary_op_t;
148 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
150 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
151 arguments, and the basic block the phi is in. Result is the value
152 number of the operation, and hashcode is stored to avoid having to
153 calculate it repeatedly. Phi nodes not in the same block are never
154 considered equivalent. */
156 typedef struct vn_phi_s
158 VEC (tree, heap) *phiargs;
159 basic_block block;
160 hashval_t hashcode;
161 tree result;
162 } *vn_phi_t;
163 typedef const struct vn_phi_s *const_vn_phi_t;
165 /* Reference operands only exist in reference operations structures.
166 They consist of an opcode, type, and some number of operands. For
167 a given opcode, some, all, or none of the operands may be used.
168 The operands are there to store the information that makes up the
169 portion of the addressing calculation that opcode performs. */
171 typedef struct vn_reference_op_struct
173 enum tree_code opcode;
174 tree type;
175 tree op0;
176 tree op1;
177 tree op2;
178 } vn_reference_op_s;
179 typedef vn_reference_op_s *vn_reference_op_t;
180 typedef const vn_reference_op_s *const_vn_reference_op_t;
182 DEF_VEC_O(vn_reference_op_s);
183 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
185 /* A reference operation in the hashtable is representation as a
186 collection of vuses, representing the memory state at the time of
187 the operation, and a collection of operands that make up the
188 addressing calculation. If two vn_reference_t's have the same set
189 of operands, they access the same memory location. We also store
190 the resulting value number, and the hashcode. The vuses are
191 always stored in order sorted by ssa name version. */
193 typedef struct vn_reference_s
195 VEC (tree, gc) *vuses;
196 VEC (vn_reference_op_s, heap) *operands;
197 hashval_t hashcode;
198 tree result;
199 } *vn_reference_t;
200 typedef const struct vn_reference_s *const_vn_reference_t;
202 /* Valid hashtables storing information we have proven to be
203 correct. */
205 static vn_tables_t valid_info;
207 /* Optimistic hashtables storing information we are making assumptions about
208 during iterations. */
210 static vn_tables_t optimistic_info;
212 /* PRE hashtables storing information about mapping from expressions to
213 value handles. */
215 static vn_tables_t pre_info;
217 /* Pointer to the set of hashtables that is currently being used.
218 Should always point to either the optimistic_info, or the
219 valid_info. */
221 static vn_tables_t current_info;
224 /* Reverse post order index for each basic block. */
226 static int *rpo_numbers;
228 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
230 /* This represents the top of the VN lattice, which is the universal
231 value. */
233 tree VN_TOP;
235 /* Next DFS number and the stack for strongly connected component
236 detection. */
238 static unsigned int next_dfs_num;
239 static VEC (tree, heap) *sccstack;
241 DEF_VEC_P(vn_ssa_aux_t);
242 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
244 /* Table of vn_ssa_aux_t's, one per ssa_name. */
246 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
248 /* Return the value numbering information for a given SSA name. */
250 vn_ssa_aux_t
251 VN_INFO (tree name)
253 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
254 SSA_NAME_VERSION (name));
257 /* Set the value numbering info for a given SSA name to a given
258 value. */
260 static inline void
261 VN_INFO_SET (tree name, vn_ssa_aux_t value)
263 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
264 SSA_NAME_VERSION (name), value);
267 /* Get the value numbering info for a given SSA name, creating it if
268 it does not exist. */
270 vn_ssa_aux_t
271 VN_INFO_GET (tree name)
273 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
274 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
275 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
276 SSA_NAME_VERSION (name) + 1);
277 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
278 SSA_NAME_VERSION (name), newinfo);
279 return newinfo;
283 /* Free a phi operation structure VP. */
285 static void
286 free_phi (void *vp)
288 vn_phi_t phi = vp;
289 VEC_free (tree, heap, phi->phiargs);
292 /* Free a reference operation structure VP. */
294 static void
295 free_reference (void *vp)
297 vn_reference_t vr = vp;
298 VEC_free (vn_reference_op_s, heap, vr->operands);
301 /* Compare two reference operands P1 and P2 for equality. return true if
302 they are equal, and false otherwise. */
304 static int
305 vn_reference_op_eq (const void *p1, const void *p2)
307 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
308 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
309 return vro1->opcode == vro2->opcode
310 && vro1->type == vro2->type
311 && expressions_equal_p (vro1->op0, vro2->op0)
312 && expressions_equal_p (vro1->op1, vro2->op1)
313 && expressions_equal_p (vro1->op2, vro2->op2);
316 /* Compute the hash for a reference operand VRO1 */
318 static hashval_t
319 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
321 return iterative_hash_expr (vro1->op0, vro1->opcode)
322 + iterative_hash_expr (vro1->op1, vro1->opcode)
323 + iterative_hash_expr (vro1->op2, vro1->opcode);
326 /* Return the hashcode for a given reference operation P1. */
328 static hashval_t
329 vn_reference_hash (const void *p1)
331 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
332 return vr1->hashcode;
335 /* Compute a hash for the reference operation VR1 and return it. */
337 static inline hashval_t
338 vn_reference_compute_hash (const vn_reference_t vr1)
340 hashval_t result = 0;
341 tree v;
342 int i;
343 vn_reference_op_t vro;
345 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
346 result += iterative_hash_expr (v, 0);
347 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
348 result += vn_reference_op_compute_hash (vro);
350 return result;
353 /* Return true if reference operations P1 and P2 are equivalent. This
354 means they have the same set of operands and vuses. */
356 static int
357 vn_reference_eq (const void *p1, const void *p2)
359 tree v;
360 int i;
361 vn_reference_op_t vro;
363 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
364 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
366 if (vr1->vuses == vr2->vuses
367 && vr1->operands == vr2->operands)
368 return true;
370 /* Impossible for them to be equivalent if they have different
371 number of vuses. */
372 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
373 return false;
375 /* We require that address operands be canonicalized in a way that
376 two memory references will have the same operands if they are
377 equivalent. */
378 if (VEC_length (vn_reference_op_s, vr1->operands)
379 != VEC_length (vn_reference_op_s, vr2->operands))
380 return false;
382 /* The memory state is more often different than the address of the
383 store/load, so check it first. */
384 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
386 if (VEC_index (tree, vr2->vuses, i) != v)
387 return false;
390 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
392 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
393 vro))
394 return false;
396 return true;
399 /* Place the vuses from STMT into *result */
401 static inline void
402 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
404 ssa_op_iter iter;
405 tree vuse;
407 if (!stmt)
408 return;
410 VEC_reserve_exact (tree, gc, *result,
411 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
413 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
414 VEC_quick_push (tree, *result, vuse);
418 /* Copy the VUSE names in STMT into a vector, and return
419 the vector. */
421 VEC (tree, gc) *
422 copy_vuses_from_stmt (tree stmt)
424 VEC (tree, gc) *vuses = NULL;
426 vuses_to_vec (stmt, &vuses);
428 return vuses;
431 /* Place the vdefs from STMT into *result */
433 static inline void
434 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
436 ssa_op_iter iter;
437 tree vdef;
439 if (!stmt)
440 return;
442 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
444 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
445 VEC_quick_push (tree, *result, vdef);
448 /* Copy the names of vdef results in STMT into a vector, and return
449 the vector. */
451 static VEC (tree, gc) *
452 copy_vdefs_from_stmt (tree stmt)
454 VEC (tree, gc) *vdefs = NULL;
456 vdefs_to_vec (stmt, &vdefs);
458 return vdefs;
461 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
462 static VEC (tree, gc) *shared_lookup_vops;
464 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
465 This function will overwrite the current SHARED_LOOKUP_VOPS
466 variable. */
468 VEC (tree, gc) *
469 shared_vuses_from_stmt (tree stmt)
471 VEC_truncate (tree, shared_lookup_vops, 0);
472 vuses_to_vec (stmt, &shared_lookup_vops);
474 return shared_lookup_vops;
477 /* Copy the operations present in load/store/call REF into RESULT, a vector of
478 vn_reference_op_s's. */
480 static void
481 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
483 /* Calls are different from all other reference operations. */
484 if (TREE_CODE (ref) == CALL_EXPR)
486 vn_reference_op_s temp;
487 tree callfn;
488 call_expr_arg_iterator iter;
489 tree callarg;
491 /* Copy the call_expr opcode, type, function being called, and
492 arguments. */
493 memset (&temp, 0, sizeof (temp));
494 temp.type = TREE_TYPE (ref);
495 temp.opcode = CALL_EXPR;
496 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
498 callfn = get_callee_fndecl (ref);
499 if (!callfn)
500 callfn = CALL_EXPR_FN (ref);
501 temp.type = TREE_TYPE (callfn);
502 temp.opcode = TREE_CODE (callfn);
503 temp.op0 = callfn;
504 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
506 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
508 memset (&temp, 0, sizeof (temp));
509 temp.type = TREE_TYPE (callarg);
510 temp.opcode = TREE_CODE (callarg);
511 temp.op0 = callarg;
512 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
514 return;
517 /* For non-calls, store the information that makes up the address. */
519 while (ref)
521 vn_reference_op_s temp;
523 memset (&temp, 0, sizeof (temp));
524 temp.type = TREE_TYPE (ref);
525 temp.opcode = TREE_CODE (ref);
527 switch (temp.opcode)
529 case ALIGN_INDIRECT_REF:
530 case MISALIGNED_INDIRECT_REF:
531 case INDIRECT_REF:
532 /* The only operand is the address, which gets its own
533 vn_reference_op_s structure. */
534 break;
535 case BIT_FIELD_REF:
536 /* Record bits and position. */
537 temp.op0 = TREE_OPERAND (ref, 1);
538 temp.op1 = TREE_OPERAND (ref, 2);
539 break;
540 case COMPONENT_REF:
541 /* Record field as operand. */
542 temp.op0 = TREE_OPERAND (ref, 1);
543 break;
544 case ARRAY_RANGE_REF:
545 case ARRAY_REF:
546 /* Record index as operand. */
547 temp.op0 = TREE_OPERAND (ref, 1);
548 temp.op1 = TREE_OPERAND (ref, 3);
549 break;
550 case STRING_CST:
551 case INTEGER_CST:
552 case COMPLEX_CST:
553 case VECTOR_CST:
554 case REAL_CST:
555 case CONSTRUCTOR:
556 case VALUE_HANDLE:
557 case VAR_DECL:
558 case PARM_DECL:
559 case CONST_DECL:
560 case RESULT_DECL:
561 case SSA_NAME:
562 temp.op0 = ref;
563 break;
564 /* These are only interesting for their operands, their
565 existence, and their type. They will never be the last
566 ref in the chain of references (IE they require an
567 operand), so we don't have to put anything
568 for op* as it will be handled by the iteration */
569 case IMAGPART_EXPR:
570 case REALPART_EXPR:
571 case VIEW_CONVERT_EXPR:
572 case ADDR_EXPR:
573 break;
574 default:
575 gcc_unreachable ();
578 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
580 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
581 ref = TREE_OPERAND (ref, 0);
582 else
583 ref = NULL_TREE;
587 /* Create a vector of vn_reference_op_s structures from REF, a
588 REFERENCE_CLASS_P tree. The vector is not shared. */
590 static VEC(vn_reference_op_s, heap) *
591 create_reference_ops_from_ref (tree ref)
593 VEC (vn_reference_op_s, heap) *result = NULL;
595 copy_reference_ops_from_ref (ref, &result);
596 return result;
599 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
601 /* Create a vector of vn_reference_op_s structures from REF, a
602 REFERENCE_CLASS_P tree. The vector is shared among all callers of
603 this function. */
605 static VEC(vn_reference_op_s, heap) *
606 shared_reference_ops_from_ref (tree ref)
608 if (!ref)
609 return NULL;
610 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
611 copy_reference_ops_from_ref (ref, &shared_lookup_references);
612 return shared_lookup_references;
616 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
617 structures into their value numbers. This is done in-place, and
618 the vector passed in is returned. */
620 static VEC (vn_reference_op_s, heap) *
621 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
623 vn_reference_op_t vro;
624 int i;
626 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
628 if (vro->opcode == SSA_NAME
629 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
630 vro->op0 = SSA_VAL (vro->op0);
633 return orig;
636 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
637 their value numbers. This is done in-place, and the vector passed
638 in is returned. */
640 static VEC (tree, gc) *
641 valueize_vuses (VEC (tree, gc) *orig)
643 bool made_replacement = false;
644 tree vuse;
645 int i;
647 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
649 if (vuse != SSA_VAL (vuse))
651 made_replacement = true;
652 VEC_replace (tree, orig, i, SSA_VAL (vuse));
656 if (made_replacement && VEC_length (tree, orig) > 1)
657 sort_vuses (orig);
659 return orig;
662 /* Lookup OP in the current hash table, and return the resulting
663 value number if it exists in the hash table. Return NULL_TREE if
664 it does not exist in the hash table. */
666 tree
667 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
669 void **slot;
670 struct vn_reference_s vr1;
672 vr1.vuses = valueize_vuses (vuses);
673 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
674 vr1.hashcode = vn_reference_compute_hash (&vr1);
675 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
676 NO_INSERT);
677 if (!slot && current_info == optimistic_info)
678 slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
679 NO_INSERT);
680 if (!slot)
681 return NULL_TREE;
683 return ((vn_reference_t)*slot)->result;
686 /* Insert OP into the current hash table with a value number of
687 RESULT. */
689 void
690 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
692 void **slot;
693 vn_reference_t vr1;
695 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
697 vr1->vuses = valueize_vuses (vuses);
698 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
699 vr1->hashcode = vn_reference_compute_hash (vr1);
700 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
702 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
703 INSERT);
705 /* Because we lookup stores using vuses, and value number failures
706 using the vdefs (see visit_reference_op_store for how and why),
707 it's possible that on failure we may try to insert an already
708 inserted store. This is not wrong, there is no ssa name for a
709 store that we could use as a differentiator anyway. Thus, unlike
710 the other lookup functions, you cannot gcc_assert (!*slot)
711 here. */
713 /* But free the old slot in case of a collision. */
714 if (*slot)
715 free_reference (*slot);
717 *slot = vr1;
721 /* Return the stored hashcode for a unary operation. */
723 static hashval_t
724 vn_unary_op_hash (const void *p1)
726 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
727 return vuo1->hashcode;
730 /* Hash a unary operation P1 and return the result. */
732 static inline hashval_t
733 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
735 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
738 /* Return true if P1 and P2, two unary operations, are equivalent. */
740 static int
741 vn_unary_op_eq (const void *p1, const void *p2)
743 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
744 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
745 return vuo1->opcode == vuo2->opcode
746 && vuo1->type == vuo2->type
747 && expressions_equal_p (vuo1->op0, vuo2->op0);
750 /* Lookup OP in the current hash table, and return the resulting
751 value number if it exists in the hash table. Return NULL_TREE if
752 it does not exist in the hash table. */
754 tree
755 vn_unary_op_lookup (tree op)
757 void **slot;
758 struct vn_unary_op_s vuo1;
760 vuo1.opcode = TREE_CODE (op);
761 vuo1.type = TREE_TYPE (op);
762 vuo1.op0 = TREE_OPERAND (op, 0);
764 if (TREE_CODE (vuo1.op0) == SSA_NAME)
765 vuo1.op0 = SSA_VAL (vuo1.op0);
767 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
768 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
769 NO_INSERT);
770 if (!slot && current_info == optimistic_info)
771 slot = htab_find_slot_with_hash (valid_info->unary, &vuo1, vuo1.hashcode,
772 NO_INSERT);
773 if (!slot)
774 return NULL_TREE;
775 return ((vn_unary_op_t)*slot)->result;
778 /* Insert OP into the current hash table with a value number of
779 RESULT. */
781 void
782 vn_unary_op_insert (tree op, tree result)
784 void **slot;
785 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
787 vuo1->opcode = TREE_CODE (op);
788 vuo1->type = TREE_TYPE (op);
789 vuo1->op0 = TREE_OPERAND (op, 0);
790 vuo1->result = result;
792 if (TREE_CODE (vuo1->op0) == SSA_NAME)
793 vuo1->op0 = SSA_VAL (vuo1->op0);
795 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
796 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
797 INSERT);
798 gcc_assert (!*slot);
799 *slot = vuo1;
802 /* Compute and return the hash value for binary operation VBO1. */
804 static inline hashval_t
805 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
807 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
808 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
811 /* Return the computed hashcode for binary operation P1. */
813 static hashval_t
814 vn_binary_op_hash (const void *p1)
816 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
817 return vbo1->hashcode;
820 /* Compare binary operations P1 and P2 and return true if they are
821 equivalent. */
823 static int
824 vn_binary_op_eq (const void *p1, const void *p2)
826 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
827 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
828 return vbo1->opcode == vbo2->opcode
829 && vbo1->type == vbo2->type
830 && expressions_equal_p (vbo1->op0, vbo2->op0)
831 && expressions_equal_p (vbo1->op1, vbo2->op1);
834 /* Lookup OP in the current hash table, and return the resulting
835 value number if it exists in the hash table. Return NULL_TREE if
836 it does not exist in the hash table. */
838 tree
839 vn_binary_op_lookup (tree op)
841 void **slot;
842 struct vn_binary_op_s vbo1;
844 vbo1.opcode = TREE_CODE (op);
845 vbo1.type = TREE_TYPE (op);
846 vbo1.op0 = TREE_OPERAND (op, 0);
847 vbo1.op1 = TREE_OPERAND (op, 1);
849 if (TREE_CODE (vbo1.op0) == SSA_NAME)
850 vbo1.op0 = SSA_VAL (vbo1.op0);
851 if (TREE_CODE (vbo1.op1) == SSA_NAME)
852 vbo1.op1 = SSA_VAL (vbo1.op1);
854 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
855 && commutative_tree_code (vbo1.opcode))
857 tree temp = vbo1.op0;
858 vbo1.op0 = vbo1.op1;
859 vbo1.op1 = temp;
862 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
863 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
864 NO_INSERT);
865 if (!slot && current_info == optimistic_info)
866 slot = htab_find_slot_with_hash (valid_info->binary, &vbo1, vbo1.hashcode,
867 NO_INSERT);
868 if (!slot)
869 return NULL_TREE;
870 return ((vn_binary_op_t)*slot)->result;
873 /* Insert OP into the current hash table with a value number of
874 RESULT. */
876 void
877 vn_binary_op_insert (tree op, tree result)
879 void **slot;
880 vn_binary_op_t vbo1;
881 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
883 vbo1->opcode = TREE_CODE (op);
884 vbo1->type = TREE_TYPE (op);
885 vbo1->op0 = TREE_OPERAND (op, 0);
886 vbo1->op1 = TREE_OPERAND (op, 1);
887 vbo1->result = result;
889 if (TREE_CODE (vbo1->op0) == SSA_NAME)
890 vbo1->op0 = SSA_VAL (vbo1->op0);
891 if (TREE_CODE (vbo1->op1) == SSA_NAME)
892 vbo1->op1 = SSA_VAL (vbo1->op1);
894 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
895 && commutative_tree_code (vbo1->opcode))
897 tree temp = vbo1->op0;
898 vbo1->op0 = vbo1->op1;
899 vbo1->op1 = temp;
901 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
902 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
903 INSERT);
904 gcc_assert (!*slot);
906 *slot = vbo1;
909 /* Compute a hashcode for PHI operation VP1 and return it. */
911 static inline hashval_t
912 vn_phi_compute_hash (vn_phi_t vp1)
914 hashval_t result = 0;
915 int i;
916 tree phi1op;
918 result = vp1->block->index;
920 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
922 if (phi1op == VN_TOP)
923 continue;
924 result += iterative_hash_expr (phi1op, result);
927 return result;
930 /* Return the computed hashcode for phi operation P1. */
932 static hashval_t
933 vn_phi_hash (const void *p1)
935 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
936 return vp1->hashcode;
939 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
941 static int
942 vn_phi_eq (const void *p1, const void *p2)
944 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
945 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
947 if (vp1->block == vp2->block)
949 int i;
950 tree phi1op;
952 /* Any phi in the same block will have it's arguments in the
953 same edge order, because of how we store phi nodes. */
954 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
956 tree phi2op = VEC_index (tree, vp2->phiargs, i);
957 if (phi1op == VN_TOP || phi2op == VN_TOP)
958 continue;
959 if (!expressions_equal_p (phi1op, phi2op))
960 return false;
962 return true;
964 return false;
967 static VEC(tree, heap) *shared_lookup_phiargs;
969 /* Lookup PHI in the current hash table, and return the resulting
970 value number if it exists in the hash table. Return NULL_TREE if
971 it does not exist in the hash table. */
973 static tree
974 vn_phi_lookup (tree phi)
976 void **slot;
977 struct vn_phi_s vp1;
978 int i;
980 VEC_truncate (tree, shared_lookup_phiargs, 0);
982 /* Canonicalize the SSA_NAME's to their value number. */
983 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
985 tree def = PHI_ARG_DEF (phi, i);
986 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
987 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
989 vp1.phiargs = shared_lookup_phiargs;
990 vp1.block = bb_for_stmt (phi);
991 vp1.hashcode = vn_phi_compute_hash (&vp1);
992 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
993 NO_INSERT);
994 if (!slot && current_info == optimistic_info)
995 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
996 NO_INSERT);
997 if (!slot)
998 return NULL_TREE;
999 return ((vn_phi_t)*slot)->result;
1002 /* Insert PHI into the current hash table with a value number of
1003 RESULT. */
1005 static void
1006 vn_phi_insert (tree phi, tree result)
1008 void **slot;
1009 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1010 int i;
1011 VEC (tree, heap) *args = NULL;
1013 /* Canonicalize the SSA_NAME's to their value number. */
1014 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1016 tree def = PHI_ARG_DEF (phi, i);
1017 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1018 VEC_safe_push (tree, heap, args, def);
1020 vp1->phiargs = args;
1021 vp1->block = bb_for_stmt (phi);
1022 vp1->result = result;
1023 vp1->hashcode = vn_phi_compute_hash (vp1);
1025 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1026 INSERT);
1028 /* Because we iterate over phi operations more than once, it's
1029 possible the slot might already exist here, hence no assert.*/
1030 *slot = vp1;
1034 /* Print set of components in strongly connected component SCC to OUT. */
1036 static void
1037 print_scc (FILE *out, VEC (tree, heap) *scc)
1039 tree var;
1040 unsigned int i;
1042 fprintf (out, "SCC consists of: ");
1043 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1045 print_generic_expr (out, var, 0);
1046 fprintf (out, " ");
1048 fprintf (out, "\n");
1051 /* Set the value number of FROM to TO, return true if it has changed
1052 as a result. */
1054 static inline bool
1055 set_ssa_val_to (tree from, tree to)
1057 tree currval;
1059 if (from != to
1060 && TREE_CODE (to) == SSA_NAME
1061 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1062 to = from;
1064 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1065 and invariants. So assert that here. */
1066 gcc_assert (to != NULL_TREE
1067 && (to == VN_TOP
1068 || TREE_CODE (to) == SSA_NAME
1069 || is_gimple_min_invariant (to)));
1071 if (dump_file && (dump_flags & TDF_DETAILS))
1073 fprintf (dump_file, "Setting value number of ");
1074 print_generic_expr (dump_file, from, 0);
1075 fprintf (dump_file, " to ");
1076 print_generic_expr (dump_file, to, 0);
1077 fprintf (dump_file, "\n");
1080 currval = SSA_VAL (from);
1082 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1084 SSA_VAL (from) = to;
1085 return true;
1087 return false;
1090 /* Set all definitions in STMT to value number to themselves.
1091 Return true if a value number changed. */
1093 static bool
1094 defs_to_varying (tree stmt)
1096 bool changed = false;
1097 ssa_op_iter iter;
1098 def_operand_p defp;
1100 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1102 tree def = DEF_FROM_PTR (defp);
1104 VN_INFO (def)->use_processed = true;
1105 changed |= set_ssa_val_to (def, def);
1107 return changed;
1110 /* Visit a copy between LHS and RHS, return true if the value number
1111 changed. */
1113 static bool
1114 visit_copy (tree lhs, tree rhs)
1117 /* Follow chains of copies to their destination. */
1118 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1119 rhs = SSA_VAL (rhs);
1121 /* The copy may have a more interesting constant filled expression
1122 (we don't, since we know our RHS is just an SSA name). */
1123 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1124 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1126 return set_ssa_val_to (lhs, rhs);
1129 /* Visit a unary operator RHS, value number it, and return true if the
1130 value number of LHS has changed as a result. */
1132 static bool
1133 visit_unary_op (tree lhs, tree op)
1135 bool changed = false;
/* Classic lookup-or-insert: if an equivalent unary expression was seen
   before, LHS gets that leader's value number; otherwise LHS becomes its
   own leader and the expression is entered into the current table.  */
1136 tree result = vn_unary_op_lookup (op);
1138 if (result)
1140 changed = set_ssa_val_to (lhs, result);
1142 else
1144 changed = set_ssa_val_to (lhs, lhs);
1145 vn_unary_op_insert (op, lhs);
1148 return changed;
1151 /* Visit a binary operator RHS, value number it, and return true if the
1152 value number of LHS has changed as a result. */
1154 static bool
1155 visit_binary_op (tree lhs, tree op)
1157 bool changed = false;
/* Same lookup-or-insert discipline as visit_unary_op, against the
   binary-operation hash table of the current (valid/optimistic) info.  */
1158 tree result = vn_binary_op_lookup (op);
1160 if (result)
1162 changed = set_ssa_val_to (lhs, result);
1164 else
1166 changed = set_ssa_val_to (lhs, lhs);
1167 vn_binary_op_insert (op, lhs);
1170 return changed;
1173 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1174 and return true if the value number of the LHS has changed as a result. */
1176 static bool
1177 visit_reference_op_load (tree lhs, tree op, tree stmt)
1179 bool changed = false;
/* Loads are keyed on both the reference expression and the VUSEs (the
   memory state read), so two loads only unify when they read the same
   location in the same memory state.  The lookup uses a shared scratch
   vector; the insert copies the vuses since the table retains them.  */
1180 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1182 if (result)
1184 changed = set_ssa_val_to (lhs, result);
1186 else
1188 changed = set_ssa_val_to (lhs, lhs);
1189 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1192 return changed;
1196 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1197 and return true if the value number of the LHS has changed as a result. */
1199 static bool
1200 visit_reference_op_store (tree lhs, tree op, tree stmt)
1202 bool changed = false;
1203 tree result;
1204 bool resultsame = false;
1206 /* First we want to lookup using the *vuses* from the store and see
1207 if there the last store to this location with the same address
1208 had the same value.
1210 The vuses represent the memory state before the store. If the
1211 memory state, address, and value of the store is the same as the
1212 last store to this location, then this store will produce the
1213 same memory state as that store.
1215 In this case the vdef versions for this store are value numbered to those
1216 vuse versions, since they represent the same memory state after
1217 this store.
1219 Otherwise, the vdefs for the store are used when inserting into
1220 the table, since the store generates a new memory state. */
1222 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1224 if (result)
/* Canonicalize the previously stored value before comparing it with the
   value being stored now.  */
1226 if (TREE_CODE (result) == SSA_NAME)
1227 result = SSA_VAL (result);
1228 resultsame = expressions_equal_p (result, op);
1231 if (!result || !resultsame)
/* The store writes a (possibly) new value: it creates a fresh memory
   state, so its vdefs become their own value numbers and the store is
   recorded keyed on those vdefs.  */
1233 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1234 int i;
1235 tree vdef;
1237 if (dump_file && (dump_flags & TDF_DETAILS))
1239 fprintf (dump_file, "No store match\n");
1240 fprintf (dump_file, "Value numbering store ");
1241 print_generic_expr (dump_file, lhs, 0);
1242 fprintf (dump_file, " to ");
1243 print_generic_expr (dump_file, op, 0);
1244 fprintf (dump_file, "\n");
1246 /* Have to set value numbers before insert, since insert is
1247 going to valueize the references in-place. */
1248 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1250 VN_INFO (vdef)->use_processed = true;
1251 changed |= set_ssa_val_to (vdef, vdef);
1254 vn_reference_insert (lhs, op, vdefs);
1256 else
1258 /* We had a match, so value number the vdefs to have the value
1259 number of the vuses they came from. */
1260 ssa_op_iter op_iter;
1261 def_operand_p var;
1262 vuse_vec_p vv;
1264 if (dump_file && (dump_flags & TDF_DETAILS))
1265 fprintf (dump_file, "Store matched earlier value,"
1266 "value numbering store vdefs to matching vuses.\n");
1268 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1270 tree def = DEF_FROM_PTR (var);
1271 tree use;
1273 /* Uh, if the vuse is a multiuse, we can't really do much
1274 here, sadly, since we don't know which value number of
1275 which vuse to use. */
1276 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1277 use = def;
1278 else
1279 use = VUSE_ELEMENT_VAR (*vv, 0);
1281 VN_INFO (def)->use_processed = true;
/* The redundant store leaves memory unchanged, so the post-store
   state (def) equals the pre-store state (value of use).  */
1282 changed |= set_ssa_val_to (def, SSA_VAL (use));
1286 return changed;
1289 /* Visit and value number PHI, return true if the value number
1290 changed. */
1292 static bool
1293 visit_phi (tree phi)
1295 bool changed = false;
1296 tree result;
/* SAMEVAL starts at VN_TOP (the optimistic "unknown" lattice top) and is
   lowered to the first non-TOP argument value seen.  */
1297 tree sameval = VN_TOP;
1298 bool allsame = true;
1299 int i;
1301 /* TODO: We could check for this in init_sccvn, and replace this
1302 with a gcc_assert. */
/* Abnormal-PHI results must not be merged with anything (coalescing
   constraints), so pessimistically value them to themselves.  */
1303 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1304 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1306 /* See if all non-TOP arguments have the same value. TOP is
1307 equivalent to everything, so we can ignore it. */
1308 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1310 tree def = PHI_ARG_DEF (phi, i);
1312 if (TREE_CODE (def) == SSA_NAME)
1313 def = SSA_VAL (def);
1314 if (def == VN_TOP)
1315 continue;
1316 if (sameval == VN_TOP)
1318 sameval = def;
1320 else
1322 if (!expressions_equal_p (def, sameval))
1324 allsame = false;
1325 break;
1330 /* If all value numbered to the same value, the phi node has that
1331 value. */
1332 if (allsame)
1334 if (is_gimple_min_invariant (sameval))
1336 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1337 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1339 else
1341 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1342 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* A degenerate PHI of one SSA value is just a copy; route through
   visit_copy so copy chains are followed.  */
1345 if (TREE_CODE (sameval) == SSA_NAME)
1346 return visit_copy (PHI_RESULT (phi), sameval);
1348 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1351 /* Otherwise, see if it is equivalent to a phi node in this block. */
1352 result = vn_phi_lookup (phi);
1353 if (result)
1355 if (TREE_CODE (result) == SSA_NAME)
1356 changed = visit_copy (PHI_RESULT (phi), result);
1357 else
1358 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1360 else
/* No equivalent PHI known: this PHI becomes its own leader.  */
1362 vn_phi_insert (phi, PHI_RESULT (phi));
1363 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1364 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1365 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1368 return changed;
1371 /* Return true if EXPR contains constants. */
/* Note this is a shallow test: only the immediate operands of unary and
   binary expressions are inspected, not whole subtrees.  */
1373 static bool
1374 expr_has_constants (tree expr)
1376 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1378 case tcc_unary:
1379 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1381 case tcc_binary:
1382 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1383 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1384 /* Constants inside reference ops are rarely interesting, but
1385 it can take a lot of looking to find them. */
1386 case tcc_reference:
1387 case tcc_declaration:
1388 return false;
1389 default:
/* Anything else counts only if it is itself an invariant.  */
1390 return is_gimple_min_invariant (expr);
/* Unreachable: every switch case above returns.  Kept to silence
   missing-return warnings.  */
1392 return false;
1395 /* Replace SSA_NAMES in expr with their value numbers, and return the
1396 result.
1397 This is performed in place. */
/* Only unary and binary expressions are valueized; other classes are
   returned untouched.  VN_TOP values are deliberately skipped: TOP means
   "not yet known" and must not leak into the IL.  Callers are expected to
   pass expressions that are safe to mutate (see unshare_expr in
   visit_use).  */
1399 static tree
1400 valueize_expr (tree expr)
1402 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1404 case tcc_unary:
1405 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1406 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1407 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1408 break;
1409 case tcc_binary:
1410 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1411 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1412 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1413 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1414 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1415 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1416 break;
1417 default:
1418 break;
1420 return expr;
1423 /* Simplify the binary expression RHS, and return the result if
1424 simplified. */
/* Returns NULL_TREE when nothing changed or the folded form is not a
   valid GIMPLE expression.  Contrast with simplify_unary_expression,
   which returns RHS itself in those cases.  */
1426 static tree
1427 simplify_binary_expression (tree stmt, tree rhs)
1429 tree result = NULL_TREE;
1430 tree op0 = TREE_OPERAND (rhs, 0);
1431 tree op1 = TREE_OPERAND (rhs, 1);
1433 /* This will not catch every single case we could combine, but will
1434 catch those with constants. The goal here is to simultaneously
1435 combine constants between expressions, but avoid infinite
1436 expansion of expressions during simplification. */
1437 if (TREE_CODE (op0) == SSA_NAME)
/* Prefer the cached constant-bearing expression; otherwise substitute
   the value number, guarding against TOP and self (no information).  */
1439 if (VN_INFO (op0)->has_constants)
1440 op0 = valueize_expr (VN_INFO (op0)->expr);
1441 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1442 op0 = SSA_VAL (op0);
1445 if (TREE_CODE (op1) == SSA_NAME)
1447 if (VN_INFO (op1)->has_constants)
1448 op1 = valueize_expr (VN_INFO (op1)->expr);
1449 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1450 op1 = SSA_VAL (op1);
1453 /* Avoid folding if nothing changed. */
1454 if (op0 == TREE_OPERAND (rhs, 0)
1455 && op1 == TREE_OPERAND (rhs, 1))
1456 return NULL_TREE;
/* Defer overflow warnings across fold: only emit them if the folded
   result is actually usable (valid GIMPLE).  */
1458 fold_defer_overflow_warnings ();
1460 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1462 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1463 stmt, 0);
1465 /* Make sure result is not a complex expression consisting
1466 of operators of operators (IE (a + b) + (a + c))
1467 Otherwise, we will end up with unbounded expressions if
1468 fold does anything at all. */
1469 if (result && valid_gimple_expression_p (result))
1470 return result;
1472 return NULL_TREE;
1475 /* Simplify the unary expression RHS, and return the result if
1476 simplified. */
/* Unlike simplify_binary_expression, the "no simplification" result here
   is RHS itself (or NULL_TREE only when OP0 is not an SSA name).  */
1478 static tree
1479 simplify_unary_expression (tree rhs)
1481 tree result = NULL_TREE;
1482 tree op0 = TREE_OPERAND (rhs, 0);
1484 if (TREE_CODE (op0) != SSA_NAME)
1485 return NULL_TREE;
1487 if (VN_INFO (op0)->has_constants)
1488 op0 = valueize_expr (VN_INFO (op0)->expr);
1489 else if (TREE_CODE (rhs) == NOP_EXPR
1490 || TREE_CODE (rhs) == CONVERT_EXPR
1491 || TREE_CODE (rhs) == REALPART_EXPR
1492 || TREE_CODE (rhs) == IMAGPART_EXPR)
1494 /* We want to do tree-combining on conversion-like expressions.
1495 Make sure we feed only SSA_NAMEs or constants to fold though. */
1496 tree tem = valueize_expr (VN_INFO (op0)->expr);
1497 if (UNARY_CLASS_P (tem)
1498 || BINARY_CLASS_P (tem)
1499 || TREE_CODE (tem) == SSA_NAME
1500 || is_gimple_min_invariant (tem))
1501 op0 = tem;
1504 /* Avoid folding if nothing changed, but remember the expression. */
1505 if (op0 == TREE_OPERAND (rhs, 0))
1506 return rhs;
1508 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1509 if (result)
/* Strip conversions fold may have wrapped around the result; only
   accept it if it is still a valid GIMPLE expression.  */
1511 STRIP_USELESS_TYPE_CONVERSION (result);
1512 if (valid_gimple_expression_p (result))
1513 return result;
1516 return rhs;
1519 /* Try to simplify RHS using equivalences and constant folding. */
/* Returns a simplified replacement expression, or RHS unchanged when no
   simplification applies.  Dispatches on the tree-code class of RHS.  */
1521 static tree
1522 try_to_simplify (tree stmt, tree rhs)
1524 if (TREE_CODE (rhs) == SSA_NAME)
/* A plain SSA name simplifies to its (invariant) value number or to
   its cached constant-bearing expression, if any.  */
1526 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1527 return SSA_VAL (rhs);
1528 else if (VN_INFO (rhs)->has_constants)
1529 return VN_INFO (rhs)->expr;
1531 else
1533 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1535 /* For references, see if we find a result for the lookup,
1536 and use it if we do. */
1537 case tcc_declaration:
1538 /* Pull out any truly constant values. */
1539 if (TREE_READONLY (rhs)
1540 && TREE_STATIC (rhs)
1541 && DECL_INITIAL (rhs)
1542 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1543 return DECL_INITIAL (rhs);
1545 /* Fallthrough. */
1546 case tcc_reference:
1548 tree result = vn_reference_lookup (rhs,
1549 shared_vuses_from_stmt (stmt));
1550 if (result)
1551 return result;
1553 /* Fallthrough for some codes. */
/* REALPART/IMAGPART are classed as references but can also be
   treated as unary ops, so let them fall into the unary case.  */
1554 if (!(TREE_CODE (rhs) == REALPART_EXPR
1555 || TREE_CODE (rhs) == IMAGPART_EXPR))
1556 break;
1557 /* We could do a little more with unary ops, if they expand
1558 into binary ops, but it's debatable whether it is worth it. */
1559 case tcc_unary:
1560 return simplify_unary_expression (rhs);
/* NOTE(review): this break (and the one below) is unreachable after
   the return; harmless, kept for symmetry.  */
1561 break;
1562 case tcc_comparison:
1563 case tcc_binary:
1564 return simplify_binary_expression (stmt, rhs);
1565 break;
1566 default:
1567 break;
1570 return rhs;
1573 /* Visit and value number USE, return true if the value number
1574 changed. */
/* Central dispatcher of the pass: classifies the defining statement of
   USE (empty/PHI/volatile-or-throwing/assignment) and routes to the
   appropriate visit_* routine, after attempting simplification of the
   RHS.  */
1576 static bool
1577 visit_use (tree use)
1579 bool changed = false;
1580 tree stmt = SSA_NAME_DEF_STMT (use);
1581 stmt_ann_t ann;
1583 VN_INFO (use)->use_processed = true;
1585 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1586 if (dump_file && (dump_flags & TDF_DETAILS))
1588 fprintf (dump_file, "Value numbering ");
1589 print_generic_expr (dump_file, use, 0);
1590 fprintf (dump_file, " stmt = ");
1591 print_generic_stmt (dump_file, stmt, 0);
1594 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1595 if (TREE_CODE (stmt) == RETURN_EXPR
1596 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1597 stmt = TREE_OPERAND (stmt, 0);
1599 ann = stmt_ann (stmt);
1601 /* Handle uninitialized uses. */
/* An empty def stmt means USE is a default definition; it values to
   itself.  */
1602 if (IS_EMPTY_STMT (stmt))
1604 changed = set_ssa_val_to (use, use);
1606 else
1608 if (TREE_CODE (stmt) == PHI_NODE)
1610 changed = visit_phi (stmt);
/* Volatile or potentially-throwing statements cannot be value
   numbered safely; force all their defs to varying.  */
1612 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1613 || (ann && ann->has_volatile_ops)
1614 || tree_could_throw_p (stmt))
1616 changed = defs_to_varying (stmt);
1618 else
1620 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1621 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1622 tree simplified;
1624 STRIP_USELESS_TYPE_CONVERSION (rhs);
1626 /* Shortcut for copies. Simplifying copies is pointless,
1627 since we copy the expression and value they represent. */
1628 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1630 changed = visit_copy (lhs, rhs);
1631 goto done;
1633 simplified = try_to_simplify (stmt, rhs);
1634 if (simplified && simplified != rhs)
1636 if (dump_file && (dump_flags & TDF_DETAILS))
1638 fprintf (dump_file, "RHS ");
1639 print_generic_expr (dump_file, rhs, 0);
1640 fprintf (dump_file, " simplified to ");
1641 print_generic_expr (dump_file, simplified, 0);
1642 if (TREE_CODE (lhs) == SSA_NAME)
1643 fprintf (dump_file, " has constants %d\n",
1644 VN_INFO (lhs)->has_constants);
1645 else
1646 fprintf (dump_file, "\n");
1650 /* Setting value numbers to constants will occasionally
1651 screw up phi congruence because constants are not
1652 uniquely associated with a single ssa name that can be
1653 looked up. */
1654 if (simplified && is_gimple_min_invariant (simplified)
1655 && TREE_CODE (lhs) == SSA_NAME
1656 && simplified != rhs)
1658 VN_INFO (lhs)->expr = simplified;
1659 VN_INFO (lhs)->has_constants = true;
1660 changed = set_ssa_val_to (lhs, simplified);
1661 goto done;
1663 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1664 && TREE_CODE (lhs) == SSA_NAME)
/* Simplified to another SSA name: this is effectively a copy.  */
1666 changed = visit_copy (lhs, simplified);
1667 goto done;
1669 else if (simplified)
1671 if (TREE_CODE (lhs) == SSA_NAME)
1673 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1674 /* We have to unshare the expression or else
1675 valuizing may change the IL stream. */
1676 VN_INFO (lhs)->expr = unshare_expr (simplified);
1678 rhs = simplified;
1680 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1682 VN_INFO (lhs)->has_constants = true;
1683 VN_INFO (lhs)->expr = unshare_expr (rhs);
1685 else if (TREE_CODE (lhs) == SSA_NAME)
1687 /* We reset expr and constantness here because we may
1688 have been value numbering optimistically, and
1689 iterating. They may become non-constant in this case,
1690 even if they were optimistically constant. */
1692 VN_INFO (lhs)->has_constants = false;
1693 VN_INFO (lhs)->expr = lhs;
/* Now value number the statement itself, dispatching on the shape of
   the LHS and the class of the (possibly simplified) RHS.  */
1696 if (TREE_CODE (lhs) == SSA_NAME
1697 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1698 changed = defs_to_varying (stmt);
1699 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1701 changed = visit_reference_op_store (lhs, rhs, stmt);
1703 else if (TREE_CODE (lhs) == SSA_NAME)
1705 if (is_gimple_min_invariant (rhs))
1707 VN_INFO (lhs)->has_constants = true;
1708 VN_INFO (lhs)->expr = rhs;
1709 changed = set_ssa_val_to (lhs, rhs);
1711 else
1713 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1715 case tcc_unary:
1716 changed = visit_unary_op (lhs, rhs);
1717 break;
1718 case tcc_binary:
1719 changed = visit_binary_op (lhs, rhs);
1720 break;
1721 /* If tcc_vl_expr ever encompasses more than
1722 CALL_EXPR, this will need to be changed. */
1723 case tcc_vl_exp:
/* Pure/const calls read (at most) memory, so treat them like
   loads; anything else clobbers and goes varying.  */
1724 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1725 changed = visit_reference_op_load (lhs, rhs, stmt);
1726 else
1727 changed = defs_to_varying (stmt);
1728 break;
1729 case tcc_declaration:
1730 case tcc_reference:
1731 changed = visit_reference_op_load (lhs, rhs, stmt);
1732 break;
1733 case tcc_expression:
1734 if (TREE_CODE (rhs) == ADDR_EXPR)
1736 changed = visit_unary_op (lhs, rhs);
1737 goto done;
1739 /* Fallthrough. */
1740 default:
1741 changed = defs_to_varying (stmt);
1742 break;
1746 else
1747 changed = defs_to_varying (stmt);
1750 done:
1751 return changed;
1754 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names.  Ordering rules: default defs (empty
   def stmt) first, then stmts with no basic block, then by the RPO number
   of the containing block; within a block, PHIs before ordinary stmts,
   ordinary stmts by their statement uid.  */
1756 static int
1757 compare_ops (const void *pa, const void *pb)
1759 const tree opa = *((const tree *)pa);
1760 const tree opb = *((const tree *)pb);
1761 tree opstmta = SSA_NAME_DEF_STMT (opa);
1762 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1763 basic_block bba;
1764 basic_block bbb;
1766 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1767 return 0;
1768 else if (IS_EMPTY_STMT (opstmta))
1769 return -1;
1770 else if (IS_EMPTY_STMT (opstmtb))
1771 return 1;
1773 bba = bb_for_stmt (opstmta);
1774 bbb = bb_for_stmt (opstmtb);
1776 if (!bba && !bbb)
1777 return 0;
1778 else if (!bba)
1779 return -1;
1780 else if (!bbb)
1781 return 1;
1783 if (bba == bbb)
/* Two PHIs in the same block execute in parallel; treat as equal.  */
1785 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1786 return 0;
1787 else if (TREE_CODE (opstmta) == PHI_NODE)
1788 return -1;
1789 else if (TREE_CODE (opstmtb) == PHI_NODE)
1790 return 1;
1791 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1793 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1796 /* Sort an array containing members of a strongly connected component
1797 SCC so that the members are ordered by RPO number.
1798 This means that when the sort is complete, iterating through the
1799 array will give you the members in RPO order. */
/* Visiting an SCC in RPO order speeds convergence of the optimistic
   iteration in process_scc.  */
1801 static void
1802 sort_scc (VEC (tree, heap) *scc)
1804 qsort (VEC_address (tree, scc),
1805 VEC_length (tree, scc),
1806 sizeof (tree),
1807 compare_ops);
1810 /* Process a strongly connected component in the SSA graph. */
/* Singleton SCCs are visited once.  Multi-member SCCs (cycles) iterate to
   a fixed point against the optimistic table — which is wiped each round,
   since optimistic equivalences from a previous round may be invalid —
   then are visited one final time against the valid table to record only
   provably correct equivalences.  */
1812 static void
1813 process_scc (VEC (tree, heap) *scc)
1815 /* If the SCC has a single member, just visit it. */
1817 if (VEC_length (tree, scc) == 1)
1819 tree use = VEC_index (tree, scc, 0);
1820 if (!VN_INFO (use)->use_processed)
1821 visit_use (use);
1823 else
1825 tree var;
1826 unsigned int i;
1827 unsigned int iterations = 0;
1828 bool changed = true;
1830 /* Iterate over the SCC with the optimistic table until it stops
1831 changing. */
1832 current_info = optimistic_info;
1833 while (changed)
1835 changed = false;
1836 iterations++;
/* Start each iteration from a clean optimistic table: stale
   entries from the previous round could justify themselves.  */
1837 htab_empty (optimistic_info->unary);
1838 htab_empty (optimistic_info->binary);
1839 htab_empty (optimistic_info->phis);
1840 htab_empty (optimistic_info->references);
1841 empty_alloc_pool (optimistic_info->unary_op_pool);
1842 empty_alloc_pool (optimistic_info->binary_op_pool);
1843 empty_alloc_pool (optimistic_info->phis_pool);
1844 empty_alloc_pool (optimistic_info->references_pool);
1845 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1846 changed |= visit_use (var);
1849 if (dump_file && (dump_flags & TDF_STATS))
1850 fprintf (dump_file, "Processing SCC required %d iterations\n",
1851 iterations);
1853 /* Finally, visit the SCC once using the valid table. */
1854 current_info = valid_info;
1855 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1856 visit_use (var);
1860 /* Depth first search on NAME to discover and process SCC's in the SSA
1861 graph.
1862 Execution of this algorithm relies on the fact that the SCC's are
1863 popped off the stack in topological order.
1864 Returns true if successful, false if we stopped processing SCC's due
1865 to ressource constraints. */
/* This is Tarjan's SCC algorithm over the SSA use-def graph, using the
   dfsnum/low/on_sccstack fields in VN_INFO.  Note the recursion is on the
   C call stack, so very deep SSA chains recurse deeply too.  */
1867 static bool
1868 DFS (tree name)
1870 ssa_op_iter iter;
1871 use_operand_p usep;
1872 tree defstmt;
1874 /* SCC info */
1875 VN_INFO (name)->dfsnum = next_dfs_num++;
1876 VN_INFO (name)->visited = true;
1877 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1879 VEC_safe_push (tree, heap, sccstack, name);
1880 VN_INFO (name)->on_sccstack = true;
1881 defstmt = SSA_NAME_DEF_STMT (name);
1883 /* Recursively DFS on our operands, looking for SCC's. */
1884 if (!IS_EMPTY_STMT (defstmt))
1886 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1887 SSA_OP_ALL_USES)
1889 tree use = USE_FROM_PTR (usep);
1891 /* Since we handle phi nodes, we will sometimes get
1892 invariants in the use expression. */
1893 if (TREE_CODE (use) != SSA_NAME)
1894 continue;
1896 if (! (VN_INFO (use)->visited))
/* Tree edge: recurse, propagating failure (resource bail-out)
   up, and fold the child's low-link into ours.  */
1898 if (!DFS (use))
1899 return false;
1900 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1901 VN_INFO (use)->low);
/* Back/cross edge to a node still on the stack: update low.  */
1903 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1904 && VN_INFO (use)->on_sccstack)
1906 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1907 VN_INFO (name)->low);
1912 /* See if we found an SCC. */
1913 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1915 VEC (tree, heap) *scc = NULL;
1916 tree x;
1918 /* Found an SCC, pop the components off the SCC stack and
1919 process them. */
1922 x = VEC_pop (tree, sccstack);
1924 VN_INFO (x)->on_sccstack = false;
1925 VEC_safe_push (tree, heap, scc, x);
1926 } while (x != name);
1928 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
1929 if (VEC_length (tree, scc)
1930 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
1932 if (dump_file)
1933 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
1934 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
1935 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
1936 return false;
1939 if (VEC_length (tree, scc) > 1)
1940 sort_scc (scc);
1942 if (dump_file && (dump_flags & TDF_DETAILS))
1943 print_scc (dump_file, scc);
1945 process_scc (scc);
1947 VEC_free (tree, heap, scc);
1950 return true;
1953 /* Allocate a value number table. */
/* Creates the four hash tables (PHIs, unary, binary, references) and
   their backing allocation pools.  Only the phi and reference tables own
   their entries' embedded vectors, hence the free_* del functions; the
   pool memory itself is released wholesale in free_vn_table.  */
1955 static void
1956 allocate_vn_table (vn_tables_t table)
1958 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1959 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1960 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1961 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1962 free_reference);
1964 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1965 sizeof (struct vn_unary_op_s),
1966 30);
1967 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1968 sizeof (struct vn_binary_op_s),
1969 30);
1970 table->phis_pool = create_alloc_pool ("VN phis",
1971 sizeof (struct vn_phi_s),
1972 30);
1973 table->references_pool = create_alloc_pool ("VN references",
1974 sizeof (struct vn_reference_s),
1975 30);
1978 /* Free a value number table. */
/* Mirror of allocate_vn_table: deletes the hash tables (which run their
   per-entry del functions) and then frees the pools in bulk.  Does not
   free TABLE itself; callers XDELETE it.  */
1980 static void
1981 free_vn_table (vn_tables_t table)
1983 htab_delete (table->phis);
1984 htab_delete (table->unary);
1985 htab_delete (table->binary);
1986 htab_delete (table->references);
1987 free_alloc_pool (table->unary_op_pool);
1988 free_alloc_pool (table->binary_op_pool);
1989 free_alloc_pool (table->phis_pool);
1990 free_alloc_pool (table->references_pool);
/* Initialize all global state of the pass: per-SSA-name aux info, RPO
   block numbering (used by compare_ops), statement uids, the VN_TOP
   sentinel, and the valid/optimistic value number tables.  */
1993 static void
1994 init_scc_vn (void)
1996 size_t i;
1997 int j;
1998 int *rpo_numbers_temp;
1999 basic_block bb;
2000 size_t id = 0;
2002 calculate_dominance_info (CDI_DOMINATORS);
2003 sccstack = NULL;
2004 next_dfs_num = 1;
2006 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2007 /* VEC_alloc doesn't actually grow it to the right size, it just
2008 preallocates the space to do so. */
2009 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2010 shared_lookup_phiargs = NULL;
2011 shared_lookup_vops = NULL;
2012 shared_lookup_references = NULL;
2013 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2014 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2015 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2017 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2018 the i'th block in RPO order is bb. We want to map bb's to RPO
2019 numbers, so we need to rearrange this array. */
2020 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2021 rpo_numbers[rpo_numbers_temp[j]] = j;
2023 free (rpo_numbers_temp);
/* VN_TOP is a unique dummy tree used as the lattice "unknown" value.  */
2025 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2027 /* Create the VN_INFO structures, and initialize value numbers to
2028 TOP. */
2029 for (i = 0; i < num_ssa_names; i++)
2031 tree name = ssa_name (i);
2032 if (name)
2034 VN_INFO_GET (name)->valnum = VN_TOP;
2035 VN_INFO (name)->expr = name;
/* Number statements in visitation order; compare_ops uses these uids
   to order stmts within a block.  */
2039 FOR_ALL_BB (bb)
2041 block_stmt_iterator bsi;
2042 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2044 tree stmt = bsi_stmt (bsi);
2045 stmt_ann (stmt)->uid = id++;
2049 /* Create the valid and optimistic value numbering tables. */
2050 valid_info = XCNEW (struct vn_tables_s);
2051 allocate_vn_table (valid_info);
2052 optimistic_info = XCNEW (struct vn_tables_s);
2053 allocate_vn_table (optimistic_info);
2054 pre_info = NULL;
/* Switch the current value number table to a freshly allocated table for
   use by the PRE pass.  Exported (non-static); freed in free_scc_vn.  */
2057 void
2058 switch_to_PRE_table (void)
2060 pre_info = XCNEW (struct vn_tables_s);
2061 allocate_vn_table (pre_info);
2062 current_info = pre_info;
/* Tear down all global state created by init_scc_vn (and, if present, the
   PRE table from switch_to_PRE_table).  Also clears any VALUE_HANDLE
   left in SSA_NAME_VALUE so no stale handles survive the pass.  */
2065 void
2066 free_scc_vn (void)
2068 size_t i;
2070 VEC_free (tree, heap, shared_lookup_phiargs);
2071 VEC_free (tree, gc, shared_lookup_vops);
2072 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2073 XDELETEVEC (rpo_numbers);
2074 for (i = 0; i < num_ssa_names; i++)
2076 tree name = ssa_name (i);
2077 if (name)
2079 XDELETE (VN_INFO (name));
2080 if (SSA_NAME_VALUE (name) &&
2081 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2082 SSA_NAME_VALUE (name) = NULL;
2086 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2087 VEC_free (tree, heap, sccstack);
2088 free_vn_table (valid_info);
2089 XDELETE (valid_info);
2090 free_vn_table (optimistic_info);
2091 XDELETE (optimistic_info);
2092 if (pre_info)
2094 free_vn_table (pre_info);
2095 XDELETE (pre_info);
2099 /* Do SCCVN. Returns true if it finished, false if we bailed out
2100 due to ressource constraints. */
/* Entry point of the pass.  On a false return, all state has already
   been freed via free_scc_vn; on true, the caller owns cleanup.  */
2102 bool
2103 run_scc_vn (void)
2105 size_t i;
2106 tree param;
2108 init_scc_vn ();
2109 current_info = valid_info;
/* Default definitions of parameters are their own value: they are
   defined on function entry, not by any visible statement.  */
2111 for (param = DECL_ARGUMENTS (current_function_decl);
2112 param;
2113 param = TREE_CHAIN (param))
2115 if (gimple_default_def (cfun, param) != NULL)
2117 tree def = gimple_default_def (cfun, param);
2118 SSA_VAL (def) = def;
/* Iterate highest SSA version first (i > 0: version 0 is unused) so the
   DFS tends to start at late uses and discover SCCs bottom-up.  */
2122 for (i = num_ssa_names - 1; i > 0; i--)
2124 tree name = ssa_name (i);
2125 if (name
2126 && VN_INFO (name)->visited == false
2127 && !has_zero_uses (name))
2128 if (!DFS (name))
2130 free_scc_vn ();
2131 return false;
2135 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fprintf (dump_file, "Value numbers:\n");
2138 for (i = 0; i < num_ssa_names; i++)
2140 tree name = ssa_name (i);
2141 if (name && VN_INFO (name)->visited
2142 && (SSA_VAL (name) != name
2143 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2145 print_generic_expr (dump_file, name, 0);
2146 fprintf (dump_file, " = ");
2147 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2148 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2149 else
2150 print_generic_expr (dump_file, SSA_VAL (name), 0);
2151 fprintf (dump_file, "\n");
2156 return true;