Updated for libbid move.
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob89e399508e9bb349477606564f99241cdc654c4d
1 /* SCC value numbering for trees
2 Copyright (C) 2006
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "fibheap.h"
37 #include "hashtab.h"
38 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
42 #include "flags.h"
43 #include "bitmap.h"
44 #include "langhooks.h"
45 #include "cfgloop.h"
46 #include "tree-ssa-sccvn.h"
48 /* This algorithm is based on the SCC algorithm presented by Keith
49 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
50 (http://citeseer.ist.psu.edu/41805.html). In
51 straight line code, it is equivalent to a regular hash based value
52 numbering that is performed in reverse postorder.
54 For code with cycles, there are two alternatives, both of which
55 require keeping the hashtables separate from the actual list of
56 value numbers for SSA names.
58 1. Iterate value numbering in an RPO walk of the blocks, removing
59 all the entries from the hashtable after each iteration (but
60 keeping the SSA name->value number mapping between iterations).
61 Iterate until it does not change.
63 2. Perform value numbering as part of an SCC walk on the SSA graph,
64 iterating only the cycles in the SSA graph until they do not change
65 (using a separate, optimistic hashtable for value numbering the SCC
66 operands).
68 The second is not just faster in practice (because most SSA graph
69 cycles do not involve all the variables in the graph), it also has
70 some nice properties.
72 One of these nice properties is that when we pop an SCC off the
73 stack, we are guaranteed to have processed all the operands coming from
74 *outside of that SCC*, so we do not need to do anything special to
75 ensure they have value numbers.
77 Another nice property is that the SCC walk is done as part of a DFS
78 of the SSA graph, which makes it easy to perform combining and
79 simplifying operations at the same time.
81 The code below is deliberately written in a way that makes it easy
82 to separate the SCC walk from the other work it does.
84 In order to propagate constants through the code, we track which
85 expressions contain constants, and use those while folding. In
86 theory, we could also track expressions whose value numbers are
87 replaced, in case we end up folding based on expression
88 identities.
90 In order to value number memory, we assign value numbers to vuses.
91 This enables us to note that, for example, stores to the same
92 address of the same value from the same starting memory states are
93 equivalent.
94 TODO:
96 1. We can iterate only the changing portions of the SCC's, but
97 I have not seen an SCC big enough for this to be a win.
98 2. If you differentiate between phi nodes for loops and phi nodes
99 for if-then-else, you can properly consider phi nodes in different
100 blocks for equivalence.
101 3. We could value number vuses in more cases, particularly, whole
102 structure copies.
105 /* The set of hashtables and alloc_pool's for their items. */
107 typedef struct vn_tables_s
109 htab_t unary;
110 htab_t binary;
111 htab_t phis;
112 htab_t references;
113 alloc_pool unary_op_pool;
114 alloc_pool binary_op_pool;
115 alloc_pool phis_pool;
116 alloc_pool references_pool;
117 } *vn_tables_t;
119 /* Binary operations in the hashtable consist of two operands, an
120 opcode, and a type. Result is the value number of the operation,
121 and hashcode is stored to avoid having to calculate it
122 repeatedly. */
124 typedef struct vn_binary_op_s
126 enum tree_code opcode;
127 tree type;
128 tree op0;
129 tree op1;
130 hashval_t hashcode;
131 tree result;
132 } *vn_binary_op_t;
134 /* Unary operations in the hashtable consist of a single operand, an
135 opcode, and a type. Result is the value number of the operation,
136 and hashcode is stored to avoid having to calculate it repeatedly. */
138 typedef struct vn_unary_op_s
140 enum tree_code opcode;
141 tree type;
142 tree op0;
143 hashval_t hashcode;
144 tree result;
145 } *vn_unary_op_t;
147 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
148 arguments, and the basic block the phi is in. Result is the value
149 number of the operation, and hashcode is stored to avoid having to
150 calculate it repeatedly. Phi nodes not in the same block are never
151 considered equivalent. */
153 typedef struct vn_phi_s
155 VEC (tree, heap) *phiargs;
156 basic_block block;
157 hashval_t hashcode;
158 tree result;
159 } *vn_phi_t;
161 /* Reference operands only exist in reference operations structures.
162 They consist of an opcode, type, and some number of operands. For
163 a given opcode, some, all, or none of the operands may be used.
164 The operands are there to store the information that makes up the
165 portion of the addressing calculation that opcode performs. */
167 typedef struct vn_reference_op_struct
169 enum tree_code opcode;
170 tree type;
171 tree op0;
172 tree op1;
173 tree op2;
174 } vn_reference_op_s;
175 typedef vn_reference_op_s *vn_reference_op_t;
177 DEF_VEC_O(vn_reference_op_s);
178 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
180 /* A reference operation in the hashtable is representation as a
181 collection of vuses, representing the memory state at the time of
182 the operation, and a collection of operands that make up the
183 addressing calculation. If two vn_reference_t's have the same set
184 of operands, they access the same memory location. We also store
185 the resulting value number, and the hashcode. The vuses are
186 always stored in order sorted by ssa name version. */
188 typedef struct vn_reference_s
190 VEC (tree, gc) *vuses;
191 VEC (vn_reference_op_s, heap) *operands;
192 hashval_t hashcode;
193 tree result;
194 } *vn_reference_t;
196 /* Valid hashtables storing information we have proven to be
197 correct. */
199 static vn_tables_t valid_info;
201 /* Optimistic hashtables storing information we are making assumptions about
202 during iterations. */
204 static vn_tables_t optimistic_info;
206 /* PRE hashtables storing information about mapping from expressions to
207 value handles. */
209 static vn_tables_t pre_info;
211 /* Pointer to the set of hashtables that is currently being used.
212 Should always point to either the optimistic_info, or the
213 valid_info. */
215 static vn_tables_t current_info;
218 /* Reverse post order index for each basic block. */
220 static int *rpo_numbers;
222 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
224 /* This represents the top of the VN lattice, which is the universal
225 value. */
227 tree VN_TOP;
229 /* Next DFS number and the stack for strongly connected component
230 detection. */
232 static unsigned int next_dfs_num;
233 static VEC (tree, heap) *sccstack;
235 DEF_VEC_P(vn_ssa_aux_t);
236 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
238 /* Table of vn_ssa_aux_t's, one per ssa_name. */
240 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
242 /* Return the value numbering information for a given SSA name. */
244 vn_ssa_aux_t
245 VN_INFO (tree name)
247 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
248 SSA_NAME_VERSION (name));
251 /* Set the value numbering info for a given SSA name to a given
252 value. */
254 static inline void
255 VN_INFO_SET (tree name, vn_ssa_aux_t value)
257 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
258 SSA_NAME_VERSION (name), value);
261 /* Get the value numbering info for a given SSA name, creating it if
262 it does not exist. */
264 vn_ssa_aux_t
265 VN_INFO_GET (tree name)
267 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
268 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
269 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
270 SSA_NAME_VERSION (name) + 1);
271 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
272 SSA_NAME_VERSION (name), newinfo);
273 return newinfo;
277 /* Compare two reference operands P1 and P2 for equality. return true if
278 they are equal, and false otherwise. */
280 static int
281 vn_reference_op_eq (const void *p1, const void *p2)
283 const vn_reference_op_t vro1 = (vn_reference_op_t) p1;
284 const vn_reference_op_t vro2 = (vn_reference_op_t) p2;
285 return vro1->opcode == vro2->opcode
286 && vro1->type == vro2->type
287 && expressions_equal_p (vro1->op0, vro2->op0)
288 && expressions_equal_p (vro1->op1, vro2->op1)
289 && expressions_equal_p (vro1->op2, vro2->op2);
292 /* Compute the hash for a reference operand VRO1 */
294 static hashval_t
295 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
297 return iterative_hash_expr (vro1->op0, vro1->opcode)
298 + iterative_hash_expr (vro1->op1, vro1->opcode)
299 + iterative_hash_expr (vro1->op2, vro1->opcode);
302 /* Return the hashcode for a given reference operation P1. */
304 static hashval_t
305 vn_reference_hash (const void *p1)
307 const vn_reference_t vr1 = (vn_reference_t) p1;
308 return vr1->hashcode;
311 /* Compute a hash for the reference operation VR1 and return it. */
313 static inline hashval_t
314 vn_reference_compute_hash (const vn_reference_t vr1)
316 hashval_t result = 0;
317 tree v;
318 int i;
319 vn_reference_op_t vro;
321 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
322 result += iterative_hash_expr (v, 0);
323 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
324 result += vn_reference_op_compute_hash (vro);
326 return result;
329 /* Return true if reference operations P1 and P2 are equivalent. This
330 means they have the same set of operands and vuses. */
332 static int
333 vn_reference_eq (const void *p1, const void *p2)
335 tree v;
336 int i;
337 vn_reference_op_t vro;
339 const vn_reference_t vr1 = (vn_reference_t) p1;
340 const vn_reference_t vr2 = (vn_reference_t) p2;
342 if (vr1->vuses == vr2->vuses
343 && vr1->operands == vr2->operands)
344 return true;
346 /* Impossible for them to be equivalent if they have different
347 number of vuses. */
348 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
349 return false;
351 /* We require that address operands be canonicalized in a way that
352 two memory references will have the same operands if they are
353 equivalent. */
354 if (VEC_length (vn_reference_op_s, vr1->operands)
355 != VEC_length (vn_reference_op_s, vr2->operands))
356 return false;
358 /* The memory state is more often different than the address of the
359 store/load, so check it first. */
360 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
362 if (VEC_index (tree, vr2->vuses, i) != v)
363 return false;
366 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
368 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
369 vro))
370 return false;
372 return true;
375 /* Place the vuses from STMT into *result */
377 static inline void
378 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
380 ssa_op_iter iter;
381 tree vuse;
383 if (!stmt)
384 return;
386 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
387 VEC_safe_push (tree, gc, *result, vuse);
389 if (VEC_length (tree, *result) > 1)
390 sort_vuses (*result);
394 /* Copy the VUSE names in STMT into a vector, and return
395 the vector. */
397 VEC (tree, gc) *
398 copy_vuses_from_stmt (tree stmt)
400 VEC (tree, gc) *vuses = NULL;
402 vuses_to_vec (stmt, &vuses);
404 return vuses;
407 /* Place the vdefs from STMT into *result */
409 static inline void
410 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
412 ssa_op_iter iter;
413 tree vdef;
415 if (!stmt)
416 return;
418 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
419 VEC_safe_push (tree, gc, *result, vdef);
421 if (VEC_length (tree, *result) > 1)
422 sort_vuses (*result);
425 /* Copy the names of vdef results in STMT into a vector, and return
426 the vector. */
428 static VEC (tree, gc) *
429 copy_vdefs_from_stmt (tree stmt)
431 VEC (tree, gc) *vdefs = NULL;
433 vdefs_to_vec (stmt, &vdefs);
435 return vdefs;
438 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
439 static VEC (tree, gc) *shared_lookup_vops;
441 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
442 This function will overwrite the current SHARED_LOOKUP_VOPS
443 variable. */
445 VEC (tree, gc) *
446 shared_vuses_from_stmt (tree stmt)
448 VEC_truncate (tree, shared_lookup_vops, 0);
449 vuses_to_vec (stmt, &shared_lookup_vops);
451 return shared_lookup_vops;
454 /* Copy the operations present in load/store/call REF into RESULT, a vector of
455 vn_reference_op_s's. */
457 static void
458 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
460 /* Calls are different from all other reference operations. */
461 if (TREE_CODE (ref) == CALL_EXPR)
463 vn_reference_op_s temp;
464 tree callfn;
465 call_expr_arg_iterator iter;
466 tree callarg;
468 /* Copy the call_expr opcode, type, function being called, and
469 arguments. */
470 memset (&temp, 0, sizeof (temp));
471 temp.type = TREE_TYPE (ref);
472 temp.opcode = CALL_EXPR;
473 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
475 callfn = get_callee_fndecl (ref);
476 if (!callfn)
477 callfn = CALL_EXPR_FN (ref);
478 temp.type = TREE_TYPE (callfn);
479 temp.opcode = TREE_CODE (callfn);
480 temp.op0 = callfn;
481 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
483 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
485 memset (&temp, 0, sizeof (temp));
486 temp.type = TREE_TYPE (callarg);
487 temp.opcode = TREE_CODE (callarg);
488 temp.op0 = callarg;
489 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
491 return;
494 /* For non-calls, store the information that makes up the address. */
496 while (ref)
498 vn_reference_op_s temp;
500 memset (&temp, 0, sizeof (temp));
501 temp.type = TREE_TYPE (ref);
502 temp.opcode = TREE_CODE (ref);
504 switch (temp.opcode)
506 case ALIGN_INDIRECT_REF:
507 case MISALIGNED_INDIRECT_REF:
508 case INDIRECT_REF:
509 /* The only operand is the address, which gets its own
510 vn_reference_op_s structure. */
511 break;
512 case BIT_FIELD_REF:
513 /* Record bits and position. */
514 temp.op0 = TREE_OPERAND (ref, 1);
515 temp.op1 = TREE_OPERAND (ref, 2);
516 break;
517 case COMPONENT_REF:
518 /* Record field as operand. */
519 temp.op0 = TREE_OPERAND (ref, 1);
520 break;
521 case ARRAY_RANGE_REF:
522 case ARRAY_REF:
523 /* Record index as operand. */
524 temp.op0 = TREE_OPERAND (ref, 1);
525 temp.op1 = TREE_OPERAND (ref, 3);
526 break;
527 case VALUE_HANDLE:
528 case VAR_DECL:
529 case PARM_DECL:
530 case CONST_DECL:
531 case RESULT_DECL:
532 case SSA_NAME:
533 temp.op0 = ref;
534 break;
535 default:
536 break;
538 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
540 if (REFERENCE_CLASS_P (ref))
541 ref = TREE_OPERAND (ref, 0);
542 else
543 ref = NULL_TREE;
547 /* Create a vector of vn_reference_op_s structures from REF, a
548 REFERENCE_CLASS_P tree. The vector is not shared. */
550 static VEC(vn_reference_op_s, heap) *
551 create_reference_ops_from_ref (tree ref)
553 VEC (vn_reference_op_s, heap) *result = NULL;
555 copy_reference_ops_from_ref (ref, &result);
556 return result;
559 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
561 /* Create a vector of vn_reference_op_s structures from REF, a
562 REFERENCE_CLASS_P tree. The vector is shared among all callers of
563 this function. */
565 static VEC(vn_reference_op_s, heap) *
566 shared_reference_ops_from_ref (tree ref)
568 if (!ref)
569 return NULL;
570 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
571 copy_reference_ops_from_ref (ref, &shared_lookup_references);
572 return shared_lookup_references;
576 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
577 structures into their value numbers. This is done in-place, and
578 the vector passed in is returned. */
580 static VEC (vn_reference_op_s, heap) *
581 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
583 vn_reference_op_t vro;
584 int i;
586 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
588 if (vro->opcode == SSA_NAME
589 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
590 vro->op0 = SSA_VAL (vro->op0);
593 return orig;
596 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
597 their value numbers. This is done in-place, and the vector passed
598 in is returned. */
600 static VEC (tree, gc) *
601 valueize_vuses (VEC (tree, gc) *orig)
603 bool made_replacement = false;
604 tree vuse;
605 int i;
607 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
609 if (vuse != SSA_VAL (vuse))
611 made_replacement = true;
612 VEC_replace (tree, orig, i, SSA_VAL (vuse));
616 if (made_replacement && VEC_length (tree, orig) > 1)
617 sort_vuses (orig);
619 return orig;
622 /* Lookup OP in the current hash table, and return the resulting
623 value number if it exists in the hash table. Return NULL_TREE if
624 it does not exist in the hash table. */
626 tree
627 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
629 void **slot;
630 struct vn_reference_s vr1;
632 vr1.vuses = valueize_vuses (vuses);
633 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
634 vr1.hashcode = vn_reference_compute_hash (&vr1);
635 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
636 NO_INSERT);
637 if (!slot)
638 return NULL_TREE;
640 return ((vn_reference_t)*slot)->result;
643 /* Insert OP into the current hash table with a value number of
644 RESULT. */
646 void
647 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
649 void **slot;
650 vn_reference_t vr1;
652 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
654 vr1->vuses = valueize_vuses (vuses);
655 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
656 vr1->hashcode = vn_reference_compute_hash (vr1);
657 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
659 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
660 INSERT);
662 /* Because we lookup stores using vuses, and value number failures
663 using the vdefs (see visit_reference_op_store for how and why),
664 it's possible that on failure we may try to insert an already
665 inserted store. This is not wrong, there is no ssa name for a
666 store that we could use as a differentiator anyway. Thus, unlike
667 the other lookup functions, you cannot gcc_assert (!*slot)
668 here. */
671 *slot = vr1;
675 /* Return the stored hashcode for a unary operation. */
677 static hashval_t
678 vn_unary_op_hash (const void *p1)
680 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
681 return vuo1->hashcode;
684 /* Hash a unary operation P1 and return the result. */
686 static inline hashval_t
687 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
689 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
692 /* Return true if P1 and P2, two unary operations, are equivalent. */
694 static int
695 vn_unary_op_eq (const void *p1, const void *p2)
697 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
698 const vn_unary_op_t vuo2 = (vn_unary_op_t) p2;
699 return vuo1->opcode == vuo2->opcode
700 && vuo1->type == vuo2->type
701 && expressions_equal_p (vuo1->op0, vuo2->op0);
704 /* Lookup OP in the current hash table, and return the resulting
705 value number if it exists in the hash table. Return NULL_TREE if
706 it does not exist in the hash table. */
708 tree
709 vn_unary_op_lookup (tree op)
711 void **slot;
712 struct vn_unary_op_s vuo1;
714 vuo1.opcode = TREE_CODE (op);
715 vuo1.type = TREE_TYPE (op);
716 vuo1.op0 = TREE_OPERAND (op, 0);
718 if (TREE_CODE (vuo1.op0) == SSA_NAME)
719 vuo1.op0 = SSA_VAL (vuo1.op0);
721 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
722 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
723 NO_INSERT);
724 if (!slot)
725 return NULL_TREE;
726 return ((vn_unary_op_t)*slot)->result;
729 /* Insert OP into the current hash table with a value number of
730 RESULT. */
732 void
733 vn_unary_op_insert (tree op, tree result)
735 void **slot;
736 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
738 vuo1->opcode = TREE_CODE (op);
739 vuo1->type = TREE_TYPE (op);
740 vuo1->op0 = TREE_OPERAND (op, 0);
741 vuo1->result = result;
743 if (TREE_CODE (vuo1->op0) == SSA_NAME)
744 vuo1->op0 = SSA_VAL (vuo1->op0);
746 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
747 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
748 INSERT);
749 gcc_assert (!*slot);
750 *slot = vuo1;
753 /* Compute and return the hash value for binary operation VBO1. */
755 static inline hashval_t
756 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
758 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
759 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
762 /* Return the computed hashcode for binary operation P1. */
764 static hashval_t
765 vn_binary_op_hash (const void *p1)
767 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
768 return vbo1->hashcode;
771 /* Compare binary operations P1 and P2 and return true if they are
772 equivalent. */
774 static int
775 vn_binary_op_eq (const void *p1, const void *p2)
777 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
778 const vn_binary_op_t vbo2 = (vn_binary_op_t) p2;
779 return vbo1->opcode == vbo2->opcode
780 && vbo1->type == vbo2->type
781 && expressions_equal_p (vbo1->op0, vbo2->op0)
782 && expressions_equal_p (vbo1->op1, vbo2->op1);
785 /* Lookup OP in the current hash table, and return the resulting
786 value number if it exists in the hash table. Return NULL_TREE if
787 it does not exist in the hash table. */
789 tree
790 vn_binary_op_lookup (tree op)
792 void **slot;
793 struct vn_binary_op_s vbo1;
795 vbo1.opcode = TREE_CODE (op);
796 vbo1.type = TREE_TYPE (op);
797 vbo1.op0 = TREE_OPERAND (op, 0);
798 vbo1.op1 = TREE_OPERAND (op, 1);
800 if (TREE_CODE (vbo1.op0) == SSA_NAME)
801 vbo1.op0 = SSA_VAL (vbo1.op0);
802 if (TREE_CODE (vbo1.op1) == SSA_NAME)
803 vbo1.op1 = SSA_VAL (vbo1.op1);
805 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
806 && commutative_tree_code (vbo1.opcode))
808 tree temp = vbo1.op0;
809 vbo1.op0 = vbo1.op1;
810 vbo1.op1 = temp;
813 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
814 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
815 NO_INSERT);
816 if (!slot)
817 return NULL_TREE;
818 return ((vn_binary_op_t)*slot)->result;
821 /* Insert OP into the current hash table with a value number of
822 RESULT. */
824 void
825 vn_binary_op_insert (tree op, tree result)
827 void **slot;
828 vn_binary_op_t vbo1;
829 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
831 vbo1->opcode = TREE_CODE (op);
832 vbo1->type = TREE_TYPE (op);
833 vbo1->op0 = TREE_OPERAND (op, 0);
834 vbo1->op1 = TREE_OPERAND (op, 1);
835 vbo1->result = result;
837 if (TREE_CODE (vbo1->op0) == SSA_NAME)
838 vbo1->op0 = SSA_VAL (vbo1->op0);
839 if (TREE_CODE (vbo1->op1) == SSA_NAME)
840 vbo1->op1 = SSA_VAL (vbo1->op1);
842 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
843 && commutative_tree_code (vbo1->opcode))
845 tree temp = vbo1->op0;
846 vbo1->op0 = vbo1->op1;
847 vbo1->op1 = temp;
849 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
850 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
851 INSERT);
852 gcc_assert (!*slot);
854 *slot = vbo1;
857 /* Compute a hashcode for PHI operation VP1 and return it. */
859 static inline hashval_t
860 vn_phi_compute_hash (vn_phi_t vp1)
862 hashval_t result = 0;
863 int i;
864 tree phi1op;
866 result = vp1->block->index;
868 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
870 if (phi1op == VN_TOP)
871 continue;
872 result += iterative_hash_expr (phi1op, result);
875 return result;
878 /* Return the computed hashcode for phi operation P1. */
880 static hashval_t
881 vn_phi_hash (const void *p1)
883 const vn_phi_t vp1 = (vn_phi_t) p1;
884 return vp1->hashcode;
887 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
889 static int
890 vn_phi_eq (const void *p1, const void *p2)
892 const vn_phi_t vp1 = (vn_phi_t) p1;
893 const vn_phi_t vp2 = (vn_phi_t) p2;
895 if (vp1->block == vp2->block)
897 int i;
898 tree phi1op;
900 /* Any phi in the same block will have it's arguments in the
901 same edge order, because of how we store phi nodes. */
902 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
904 tree phi2op = VEC_index (tree, vp2->phiargs, i);
905 if (phi1op == VN_TOP || phi2op == VN_TOP)
906 continue;
907 if (!expressions_equal_p (phi1op, phi2op))
908 return false;
910 return true;
912 return false;
915 static VEC(tree, heap) *shared_lookup_phiargs;
917 /* Lookup PHI in the current hash table, and return the resulting
918 value number if it exists in the hash table. Return NULL_TREE if
919 it does not exist in the hash table. */
921 static tree
922 vn_phi_lookup (tree phi)
924 void **slot;
925 struct vn_phi_s vp1;
926 int i;
928 VEC_truncate (tree, shared_lookup_phiargs, 0);
930 /* Canonicalize the SSA_NAME's to their value number. */
931 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
933 tree def = PHI_ARG_DEF (phi, i);
934 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
935 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
937 vp1.phiargs = shared_lookup_phiargs;
938 vp1.block = bb_for_stmt (phi);
939 vp1.hashcode = vn_phi_compute_hash (&vp1);
940 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
941 NO_INSERT);
942 if (!slot)
943 return NULL_TREE;
944 return ((vn_phi_t)*slot)->result;
947 /* Insert PHI into the current hash table with a value number of
948 RESULT. */
950 static void
951 vn_phi_insert (tree phi, tree result)
953 void **slot;
954 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
955 int i;
956 VEC (tree, heap) *args = NULL;
958 /* Canonicalize the SSA_NAME's to their value number. */
959 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
961 tree def = PHI_ARG_DEF (phi, i);
962 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
963 VEC_safe_push (tree, heap, args, def);
965 vp1->phiargs = args;
966 vp1->block = bb_for_stmt (phi);
967 vp1->result = result;
968 vp1->hashcode = vn_phi_compute_hash (vp1);
970 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
971 INSERT);
973 /* Because we iterate over phi operations more than once, it's
974 possible the slot might already exist here, hence no assert.*/
975 *slot = vp1;
979 /* Print set of components in strongly connected component SCC to OUT. */
981 static void
982 print_scc (FILE *out, VEC (tree, heap) *scc)
984 tree var;
985 unsigned int i;
987 fprintf (out, "SCC consists of: ");
988 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
990 print_generic_expr (out, var, 0);
991 fprintf (out, " ");
993 fprintf (out, "\n");
996 /* Set the value number of FROM to TO, return true if it has changed
997 as a result. */
999 static inline bool
1000 set_ssa_val_to (tree from, tree to)
1002 gcc_assert (to != NULL);
1004 /* Make sure we don't create chains of copies, so that we get the
1005 best value numbering. visit_copy has code to make sure this doesn't
1006 happen, we are doing this here to assert that nothing else breaks
1007 this. */
1008 gcc_assert (TREE_CODE (to) != SSA_NAME
1009 || TREE_CODE (SSA_VAL (to)) != SSA_NAME
1010 || SSA_VAL (to) == to
1011 || to == from);
1012 /* The only thing we allow as value numbers are ssa_names and
1013 invariants. So assert that here. */
1014 gcc_assert (TREE_CODE (to) == SSA_NAME || is_gimple_min_invariant (to));
1016 if (dump_file && (dump_flags & TDF_DETAILS))
1018 fprintf (dump_file, "Setting value number of ");
1019 print_generic_expr (dump_file, from, 0);
1020 fprintf (dump_file, " to ");
1021 print_generic_expr (dump_file, to, 0);
1022 fprintf (dump_file, "\n");
1025 if (SSA_VAL (from) != to)
1027 SSA_VAL (from) = to;
1028 return true;
1030 return false;
1033 /* Set all definitions in STMT to value number to themselves.
1034 Return true if a value number changed. */
1036 static bool
1037 defs_to_varying (tree stmt)
1039 bool changed = false;
1040 ssa_op_iter iter;
1041 def_operand_p defp;
1043 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1045 tree def = DEF_FROM_PTR (defp);
1047 VN_INFO (def)->use_processed = true;
1048 changed |= set_ssa_val_to (def, def);
1050 return changed;
1053 /* Visit a copy between LHS and RHS, return true if the value number
1054 changed. */
1056 static bool
1057 visit_copy (tree lhs, tree rhs)
1060 /* Follow chains of copies to their destination. */
1061 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1062 rhs = SSA_VAL (rhs);
1064 /* The copy may have a more interesting constant filled expression
1065 (we don't, since we know our RHS is just an SSA name). */
1066 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1067 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1069 return set_ssa_val_to (lhs, rhs);
1072 /* Visit a unary operator RHS, value number it, and return true if the
1073 value number of LHS has changed as a result. */
1075 static bool
1076 visit_unary_op (tree lhs, tree op)
1078 bool changed = false;
1079 tree result = vn_unary_op_lookup (op);
1081 if (result)
1083 changed = set_ssa_val_to (lhs, result);
1085 else
1087 changed = set_ssa_val_to (lhs, lhs);
1088 vn_unary_op_insert (op, lhs);
1091 return changed;
1094 /* Visit a binary operator RHS, value number it, and return true if the
1095 value number of LHS has changed as a result. */
1097 static bool
1098 visit_binary_op (tree lhs, tree op)
1100 bool changed = false;
1101 tree result = vn_binary_op_lookup (op);
1103 if (result)
1105 changed = set_ssa_val_to (lhs, result);
1107 else
1109 changed = set_ssa_val_to (lhs, lhs);
1110 vn_binary_op_insert (op, lhs);
1113 return changed;
1116 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1117 and return true if the value number of the LHS has changed as a result. */
1119 static bool
1120 visit_reference_op_load (tree lhs, tree op, tree stmt)
1122 bool changed = false;
1123 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1125 if (result)
1127 changed = set_ssa_val_to (lhs, result);
1129 else
1131 changed = set_ssa_val_to (lhs, lhs);
1132 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1135 return changed;
1139 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1140 and return true if the value number of the LHS has changed as a result. */
1142 static bool
1143 visit_reference_op_store (tree lhs, tree op, tree stmt)
1145 bool changed = false;
1146 tree result;
1147 bool resultsame = false;
1149 /* First we want to lookup using the *vuses* from the store and see
1150 if there the last store to this location with the same address
1151 had the same value.
1153 The vuses represent the memory state before the store. If the
1154 memory state, address, and value of the store is the same as the
1155 last store to this location, then this store will produce the
1156 same memory state as that store.
1158 In this case the vdef versions for this store are value numbered to those
1159 vuse versions, since they represent the same memory state after
1160 this store.
1162 Otherwise, the vdefs for the store are used when inserting into
1163 the table, since the store generates a new memory state. */
1165 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1167 if (result)
1169 if (TREE_CODE (result) == SSA_NAME)
1170 result = SSA_VAL (result);
1171 resultsame = expressions_equal_p (result, op);
1174 if (!result || !resultsame)
1176 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1177 int i;
1178 tree vdef;
1180 if (dump_file && (dump_flags & TDF_DETAILS))
1182 fprintf (dump_file, "No store match\n");
1183 fprintf (dump_file, "Value numbering store ");
1184 print_generic_expr (dump_file, lhs, 0);
1185 fprintf (dump_file, " to ");
1186 print_generic_expr (dump_file, op, 0);
1187 fprintf (dump_file, "\n");
1189 /* Have to set value numbers before insert, since insert is
1190 going to valueize the references in-place. */
1191 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1193 VN_INFO (vdef)->use_processed = true;
1194 changed |= set_ssa_val_to (vdef, vdef);
1197 vn_reference_insert (lhs, op, vdefs);
1199 else
1201 /* We had a match, so value number the vdefs to have the value
1202 number of the vuses they came from. */
1203 ssa_op_iter op_iter;
1204 def_operand_p var;
1205 vuse_vec_p vv;
1207 if (dump_file && (dump_flags & TDF_DETAILS))
1208 fprintf (dump_file, "Store matched earlier value,"
1209 "value numbering store vdefs to matching vuses.\n");
1211 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1213 tree def = DEF_FROM_PTR (var);
1214 tree use;
1216 /* Uh, if the vuse is a multiuse, we can't really do much
1217 here, sadly, since we don't know which value number of
1218 which vuse to use. */
1219 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1220 use = def;
1221 else
1222 use = VUSE_ELEMENT_VAR (*vv, 0);
1224 VN_INFO (def)->use_processed = true;
1225 changed |= set_ssa_val_to (def, SSA_VAL (use));
1229 return changed;
1232 /* Visit and value number PHI, return true if the value number
1233 changed. */
1235 static bool
1236 visit_phi (tree phi)
1238 bool changed = false;
1239 tree result;
1240 tree sameval = VN_TOP;
1241 bool allsame = true;
1242 int i;
1244 /* See if all non-TOP arguments have the same value. TOP is
1245 equivalent to everything, so we can ignore it. */
1246 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1248 tree def = PHI_ARG_DEF (phi, i);
1250 if (TREE_CODE (def) == SSA_NAME)
1251 def = SSA_VAL (def);
1252 if (def == VN_TOP)
1253 continue;
1254 if (sameval == VN_TOP)
1256 sameval = def;
1258 else
1260 if (!expressions_equal_p (def, sameval))
1262 allsame = false;
1263 break;
1268 /* If all value numbered to the same value, the phi node has that
1269 value. */
1270 if (allsame)
1272 if (is_gimple_min_invariant (sameval))
1274 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1275 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1277 else
1279 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1280 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1283 if (TREE_CODE (sameval) == SSA_NAME)
1284 return visit_copy (PHI_RESULT (phi), sameval);
1286 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1289 /* Otherwise, see if it is equivalent to a phi node in this block. */
1290 result = vn_phi_lookup (phi);
1291 if (result)
1293 if (TREE_CODE (result) == SSA_NAME)
1294 changed = visit_copy (PHI_RESULT (phi), result);
1295 else
1296 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1298 else
1300 vn_phi_insert (phi, PHI_RESULT (phi));
1301 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1302 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1303 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1306 return changed;
1309 /* Return true if EXPR contains constants. */
1311 static bool
1312 expr_has_constants (tree expr)
1314 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1316 case tcc_unary:
1317 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1319 case tcc_binary:
1320 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1321 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1322 /* Constants inside reference ops are rarely interesting, but
1323 it can take a lot of looking to find them. */
1324 case tcc_reference:
1325 return false;
1326 default:
1327 return is_gimple_min_invariant (expr);
1329 return false;
1332 /* Replace SSA_NAMES in expr with their value numbers, and return the
1333 result.
1334 This is performed in place. */
1336 static tree
1337 valueize_expr (tree expr)
1339 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1341 case tcc_unary:
1342 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1343 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1344 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1345 break;
1346 case tcc_binary:
1347 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1348 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1349 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1350 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1351 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1352 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1353 break;
1354 default:
1355 break;
1357 return expr;
1360 /* Simplify the binary expression RHS, and return the result if
1361 simplified. */
1363 static tree
1364 simplify_binary_expression (tree rhs)
1366 tree result = NULL_TREE;
1367 tree op0 = TREE_OPERAND (rhs, 0);
1368 tree op1 = TREE_OPERAND (rhs, 1);
1370 /* This will not catch every single case we could combine, but will
1371 catch those with constants. The goal here is to simultaneously
1372 combine constants between expressions, but avoid infinite
1373 expansion of expressions during simplification. */
1374 if (TREE_CODE (op0) == SSA_NAME)
1376 if (VN_INFO (op0)->has_constants)
1377 op0 = valueize_expr (VN_INFO (op0)->expr);
1378 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1379 op0 = VN_INFO (op0)->valnum;
1382 if (TREE_CODE (op1) == SSA_NAME)
1384 if (VN_INFO (op1)->has_constants)
1385 op1 = valueize_expr (VN_INFO (op1)->expr);
1386 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1387 op1 = VN_INFO (op1)->valnum;
1389 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1391 /* Make sure result is not a complex expression consiting
1392 of operators of operators (IE (a + b) + (a + c))
1393 Otherwise, we will end up with unbounded expressions if
1394 fold does anything at all. */
1395 if (result)
1397 if (is_gimple_min_invariant (result))
1398 return result;
1399 else if (SSA_VAR_P (result))
1400 return result;
1401 else if (EXPR_P (result))
1403 switch (TREE_CODE_CLASS (TREE_CODE (result)))
1405 case tcc_unary:
1407 tree op0 = TREE_OPERAND (result, 0);
1408 if (!EXPR_P (op0))
1409 return result;
1411 break;
1412 case tcc_binary:
1414 tree op0 = TREE_OPERAND (result, 0);
1415 tree op1 = TREE_OPERAND (result, 1);
1416 if (!EXPR_P (op0) && !EXPR_P (op1))
1417 return result;
1419 break;
1420 default:
1421 break;
1425 return NULL_TREE;
1428 /* Try to simplify RHS using equivalences and constant folding. */
1430 static tree
1431 try_to_simplify (tree stmt, tree rhs)
1433 if (TREE_CODE (rhs) == SSA_NAME)
1435 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1436 return SSA_VAL (rhs);
1437 else if (VN_INFO (rhs)->has_constants)
1438 return VN_INFO (rhs)->expr;
1440 else
1442 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1444 /* For references, see if we find a result for the lookup,
1445 and use it if we do. */
1447 case tcc_reference:
1449 tree result = vn_reference_lookup (rhs,
1450 shared_vuses_from_stmt (stmt));
1451 if (result)
1452 return result;
1454 break;
1455 /* We could do a little more with unary ops, if they expand
1456 into binary ops, but it's debatable whether it is worth it. */
1457 case tcc_unary:
1459 tree result = NULL_TREE;
1460 tree op0 = TREE_OPERAND (rhs, 0);
1461 if (TREE_CODE (op0) == SSA_NAME && VN_INFO (op0)->has_constants)
1462 op0 = VN_INFO (op0)->expr;
1463 else if (TREE_CODE (op0) == SSA_NAME && SSA_VAL (op0) != op0)
1464 op0 = SSA_VAL (op0);
1465 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1466 if (result)
1467 return result;
1469 break;
1470 case tcc_comparison:
1471 case tcc_binary:
1472 return simplify_binary_expression (rhs);
1473 break;
1474 default:
1475 break;
1478 return rhs;
1481 /* Visit and value number USE, return true if the value number
1482 changed. */
1484 static bool
1485 visit_use (tree use)
1487 bool changed = false;
1488 tree stmt = SSA_NAME_DEF_STMT (use);
1489 stmt_ann_t ann;
1491 VN_INFO (use)->use_processed = true;
1493 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1494 if (dump_file && (dump_flags & TDF_DETAILS))
1496 fprintf (dump_file, "Value numbering ");
1497 print_generic_expr (dump_file, use, 0);
1498 fprintf (dump_file, " stmt = ");
1499 print_generic_stmt (dump_file, stmt, 0);
1502 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1503 if (TREE_CODE (stmt) == RETURN_EXPR
1504 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1505 stmt = TREE_OPERAND (stmt, 0);
1507 ann = stmt_ann (stmt);
1509 /* Handle uninitialized uses. */
1510 if (IS_EMPTY_STMT (stmt))
1512 changed = set_ssa_val_to (use, use);
1514 else
1516 if (TREE_CODE (stmt) == PHI_NODE)
1518 changed = visit_phi (stmt);
1520 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1521 || (ann && ann->has_volatile_ops))
1523 changed = defs_to_varying (stmt);
1525 else
1527 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1528 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1529 tree simplified;
1531 STRIP_USELESS_TYPE_CONVERSION (rhs);
1533 simplified = try_to_simplify (stmt, rhs);
1534 if (simplified && simplified != rhs)
1536 if (dump_file && (dump_flags & TDF_DETAILS))
1538 fprintf (dump_file, "RHS ");
1539 print_generic_expr (dump_file, rhs, 0);
1540 fprintf (dump_file, " simplified to ");
1541 print_generic_expr (dump_file, simplified, 0);
1542 if (TREE_CODE (lhs) == SSA_NAME)
1543 fprintf (dump_file, " has constants %d\n",
1544 VN_INFO (lhs)->has_constants);
1545 else
1546 fprintf (dump_file, "\n");
1550 /* Setting value numbers to constants will occasionally
1551 screw up phi congruence because constants are not
1552 uniquely associated with a single ssa name that can be
1553 looked up. */
1554 if (simplified && is_gimple_min_invariant (simplified)
1555 && TREE_CODE (lhs) == SSA_NAME
1556 && simplified != rhs)
1558 VN_INFO (lhs)->expr = simplified;
1559 VN_INFO (lhs)->has_constants = true;
1560 changed = set_ssa_val_to (lhs, simplified);
1561 goto done;
1563 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1564 && TREE_CODE (lhs) == SSA_NAME)
1566 changed = visit_copy (lhs, simplified);
1567 goto done;
1569 else if (simplified)
1571 if (TREE_CODE (lhs) == SSA_NAME)
1573 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1574 /* We have to unshare the expression or else
1575 valuizing may change the IL stream. */
1576 VN_INFO (lhs)->expr = unshare_expr (simplified);
1578 rhs = simplified;
1580 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1582 VN_INFO (lhs)->has_constants = true;
1583 VN_INFO (lhs)->expr = unshare_expr (rhs);
1585 else if (TREE_CODE (lhs) == SSA_NAME)
1587 /* We reset expr and constantness here because we may
1588 have been value numbering optimistically, and
1589 iterating. They may become non-constant in this case,
1590 even if they were optimistically constant. */
1592 VN_INFO (lhs)->has_constants = false;
1593 VN_INFO (lhs)->expr = lhs;
1596 if (TREE_CODE (lhs) == SSA_NAME
1597 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1598 changed = defs_to_varying (stmt);
1599 else if (REFERENCE_CLASS_P (lhs))
1601 changed = visit_reference_op_store (lhs, rhs, stmt);
1603 else if (TREE_CODE (lhs) == SSA_NAME)
1605 if (is_gimple_min_invariant (rhs))
1607 VN_INFO (lhs)->has_constants = true;
1608 VN_INFO (lhs)->expr = rhs;
1609 changed = set_ssa_val_to (lhs, rhs);
1611 else if (TREE_CODE (rhs) == SSA_NAME)
1612 changed = visit_copy (lhs, rhs);
1613 else
1615 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1617 case tcc_unary:
1618 changed = visit_unary_op (lhs, rhs);
1619 break;
1620 case tcc_binary:
1621 changed = visit_binary_op (lhs, rhs);
1622 break;
1623 /* If tcc_vl_expr ever encompasses more than
1624 CALL_EXPR, this will need to be changed. */
1625 case tcc_vl_exp:
1626 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1627 changed = visit_reference_op_load (lhs, rhs, stmt);
1628 else
1629 changed = defs_to_varying (stmt);
1630 break;
1631 case tcc_declaration:
1632 case tcc_reference:
1633 changed = visit_reference_op_load (lhs, rhs, stmt);
1634 break;
1635 case tcc_expression:
1636 if (TREE_CODE (rhs) == ADDR_EXPR)
1638 changed = visit_unary_op (lhs, rhs);
1639 goto done;
1641 /* Fallthrough. */
1642 default:
1643 changed = defs_to_varying (stmt);
1644 break;
1648 else
1649 changed = defs_to_varying (stmt);
1652 done:
1653 return changed;
1656 /* Compare two operands by reverse postorder index */
1658 static int
1659 compare_ops (const void *pa, const void *pb)
1661 const tree opa = *((const tree *)pa);
1662 const tree opb = *((const tree *)pb);
1663 tree opstmta = SSA_NAME_DEF_STMT (opa);
1664 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1665 basic_block bba;
1666 basic_block bbb;
1668 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1669 return 0;
1670 else if (IS_EMPTY_STMT (opstmta))
1671 return -1;
1672 else if (IS_EMPTY_STMT (opstmtb))
1673 return 1;
1675 bba = bb_for_stmt (opstmta);
1676 bbb = bb_for_stmt (opstmtb);
1678 if (!bba && !bbb)
1679 return 0;
1680 else if (!bba)
1681 return -1;
1682 else if (!bbb)
1683 return 1;
1685 if (bba == bbb)
1687 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1688 return 0;
1689 else if (TREE_CODE (opstmta) == PHI_NODE)
1690 return -1;
1691 else if (TREE_CODE (opstmtb) == PHI_NODE)
1692 return 1;
1693 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1695 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1698 /* Sort an array containing members of a strongly connected component
1699 SCC so that the members are ordered by RPO number.
1700 This means that when the sort is complete, iterating through the
1701 array will give you the members in RPO order. */
1703 static void
1704 sort_scc (VEC (tree, heap) *scc)
1706 qsort (VEC_address (tree, scc),
1707 VEC_length (tree, scc),
1708 sizeof (tree),
1709 compare_ops);
1712 /* Process a strongly connected component in the SSA graph. */
1714 static void
1715 process_scc (VEC (tree, heap) *scc)
1717 /* If the SCC has a single member, just visit it. */
1719 if (VEC_length (tree, scc) == 1)
1721 tree use = VEC_index (tree, scc, 0);
1722 if (!VN_INFO (use)->use_processed)
1723 visit_use (use);
1725 else
1727 tree var;
1728 unsigned int i;
1729 unsigned int iterations = 0;
1730 bool changed = true;
1732 /* Iterate over the SCC with the optimistic table until it stops
1733 changing. */
1734 current_info = optimistic_info;
1735 while (changed)
1737 changed = false;
1738 iterations++;
1739 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1740 changed |= visit_use (var);
1743 if (dump_file && (dump_flags & TDF_STATS))
1744 fprintf (dump_file, "Processing SCC required %d iterations\n",
1745 iterations);
1747 /* Finally, visit the SCC once using the valid table. */
1748 current_info = valid_info;
1749 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1750 visit_use (var);
1754 /* Depth first search on NAME to discover and process SCC's in the SSA
1755 graph.
1756 Execution of this algorithm relies on the fact that the SCC's are
1757 popped off the stack in topological order. */
1759 static void
1760 DFS (tree name)
1762 ssa_op_iter iter;
1763 use_operand_p usep;
1764 tree defstmt;
1766 /* SCC info */
1767 VN_INFO (name)->dfsnum = next_dfs_num++;
1768 VN_INFO (name)->visited = true;
1769 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1771 VEC_safe_push (tree, heap, sccstack, name);
1772 VN_INFO (name)->on_sccstack = true;
1773 defstmt = SSA_NAME_DEF_STMT (name);
1775 /* Recursively DFS on our operands, looking for SCC's. */
1776 if (!IS_EMPTY_STMT (defstmt))
1778 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1779 SSA_OP_ALL_USES)
1781 tree use = USE_FROM_PTR (usep);
1783 /* Since we handle phi nodes, we will sometimes get
1784 invariants in the use expression. */
1785 if (TREE_CODE (use) != SSA_NAME)
1786 continue;
1788 if (! (VN_INFO (use)->visited))
1790 DFS (use);
1791 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1792 VN_INFO (use)->low);
1794 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1795 && VN_INFO (use)->on_sccstack)
1797 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1798 VN_INFO (name)->low);
1803 /* See if we found an SCC. */
1804 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1806 VEC (tree, heap) *scc = NULL;
1807 tree x;
1809 /* Found an SCC, pop the components off the SCC stack and
1810 process them. */
1813 x = VEC_pop (tree, sccstack);
1815 VN_INFO (x)->on_sccstack = false;
1816 VEC_safe_push (tree, heap, scc, x);
1817 } while (x != name);
1819 if (VEC_length (tree, scc) > 1)
1820 sort_scc (scc);
1822 if (dump_file && (dump_flags & TDF_DETAILS))
1823 print_scc (dump_file, scc);
1825 process_scc (scc);
1827 VEC_free (tree, heap, scc);
1831 static void
1832 free_phi (void *vp)
1834 vn_phi_t phi = vp;
1835 VEC_free (tree, heap, phi->phiargs);
1839 /* Free a reference operation structure VP. */
1841 static void
1842 free_reference (void *vp)
1844 vn_reference_t vr = vp;
1845 VEC_free (vn_reference_op_s, heap, vr->operands);
1848 /* Allocate a value number table. */
1850 static void
1851 allocate_vn_table (vn_tables_t table)
1853 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1854 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1855 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1856 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1857 free_reference);
1859 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1860 sizeof (struct vn_unary_op_s),
1861 30);
1862 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1863 sizeof (struct vn_binary_op_s),
1864 30);
1865 table->phis_pool = create_alloc_pool ("VN phis",
1866 sizeof (struct vn_phi_s),
1867 30);
1868 table->references_pool = create_alloc_pool ("VN references",
1869 sizeof (struct vn_reference_s),
1870 30);
1873 /* Free a value number table. */
1875 static void
1876 free_vn_table (vn_tables_t table)
1878 htab_delete (table->phis);
1879 htab_delete (table->unary);
1880 htab_delete (table->binary);
1881 htab_delete (table->references);
1882 free_alloc_pool (table->unary_op_pool);
1883 free_alloc_pool (table->binary_op_pool);
1884 free_alloc_pool (table->phis_pool);
1885 free_alloc_pool (table->references_pool);
1888 static void
1889 init_scc_vn (void)
1891 size_t i;
1892 int j;
1893 int *rpo_numbers_temp;
1894 basic_block bb;
1895 size_t id = 0;
1897 calculate_dominance_info (CDI_DOMINATORS);
1898 sccstack = NULL;
1899 next_dfs_num = 1;
1901 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1902 /* VEC_alloc doesn't actually grow it to the right size, it just
1903 preallocates the space to do so. */
1904 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1905 shared_lookup_phiargs = NULL;
1906 shared_lookup_vops = NULL;
1907 shared_lookup_references = NULL;
1908 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1909 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1910 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1912 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1913 the i'th block in RPO order is bb. We want to map bb's to RPO
1914 numbers, so we need to rearrange this array. */
1915 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1916 rpo_numbers[rpo_numbers_temp[j]] = j;
1918 free (rpo_numbers_temp);
1920 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1922 /* Create the VN_INFO structures, and initialize value numbers to
1923 TOP. */
1924 for (i = 0; i < num_ssa_names; i++)
1926 tree name = ssa_name (i);
1927 if (name)
1929 VN_INFO_GET (name)->valnum = VN_TOP;
1930 VN_INFO (name)->expr = name;
1934 FOR_ALL_BB (bb)
1936 block_stmt_iterator bsi;
1937 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1939 tree stmt = bsi_stmt (bsi);
1940 stmt_ann (stmt)->uid = id++;
1944 /* Create the valid and optimistic value numbering tables. */
1945 valid_info = XCNEW (struct vn_tables_s);
1946 allocate_vn_table (valid_info);
1947 optimistic_info = XCNEW (struct vn_tables_s);
1948 allocate_vn_table (optimistic_info);
1949 pre_info = NULL;
1952 void
1953 switch_to_PRE_table (void)
1955 pre_info = XCNEW (struct vn_tables_s);
1956 allocate_vn_table (pre_info);
1957 current_info = pre_info;
1960 void
1961 free_scc_vn (void)
1963 size_t i;
1965 VEC_free (tree, heap, shared_lookup_phiargs);
1966 VEC_free (tree, gc, shared_lookup_vops);
1967 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
1968 XDELETEVEC (rpo_numbers);
1969 for (i = 0; i < num_ssa_names; i++)
1971 tree name = ssa_name (i);
1972 if (name)
1974 XDELETE (VN_INFO (name));
1975 if (SSA_NAME_VALUE (name) &&
1976 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
1977 SSA_NAME_VALUE (name) = NULL;
1981 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
1982 VEC_free (tree, heap, sccstack);
1983 free_vn_table (valid_info);
1984 XDELETE (valid_info);
1985 free_vn_table (optimistic_info);
1986 XDELETE (optimistic_info);
1987 if (pre_info)
1989 free_vn_table (pre_info);
1990 XDELETE (pre_info);
1994 void
1995 run_scc_vn (void)
1997 size_t i;
1998 tree param;
2000 init_scc_vn ();
2001 current_info = valid_info;
2003 for (param = DECL_ARGUMENTS (current_function_decl);
2004 param;
2005 param = TREE_CHAIN (param))
2007 if (gimple_default_def (cfun, param) != NULL)
2009 tree def = gimple_default_def (cfun, param);
2010 SSA_VAL (def) = def;
2014 for (i = num_ssa_names - 1; i > 0; i--)
2016 tree name = ssa_name (i);
2017 if (name
2018 && VN_INFO (name)->visited == false
2019 && !has_zero_uses (name))
2020 DFS (name);
2023 if (dump_file && (dump_flags & TDF_DETAILS))
2025 fprintf (dump_file, "Value numbers:\n");
2026 for (i = 0; i < num_ssa_names; i++)
2028 tree name = ssa_name (i);
2029 if (name && VN_INFO (name)->visited
2030 && (SSA_VAL (name) != name
2031 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2033 print_generic_expr (dump_file, name, 0);
2034 fprintf (dump_file, " = ");
2035 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2036 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2037 else
2038 print_generic_expr (dump_file, SSA_VAL (name), 0);
2039 fprintf (dump_file, "\n");