/* Extraction provenance (gitweb header residue): this file is
   gcc/tree-ssa-sccvn.c from official-gcc.git,
   blob b4fb014b76dd9e529a2ac72123ffe8945b1b235c.  The accompanying commit
   subject was "mips.c (mips16_copy_fpr_return_value): New function, split
   out from...".  */
/* SCC value numbering for trees
   Copyright (C) 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
105 /* The set of hashtables and alloc_pool's for their items. */
107 typedef struct vn_tables_s
109 htab_t unary;
110 htab_t binary;
111 htab_t phis;
112 htab_t references;
113 alloc_pool unary_op_pool;
114 alloc_pool binary_op_pool;
115 alloc_pool phis_pool;
116 alloc_pool references_pool;
117 } *vn_tables_t;
119 /* Binary operations in the hashtable consist of two operands, an
120 opcode, and a type. Result is the value number of the operation,
121 and hashcode is stored to avoid having to calculate it
122 repeatedly. */
124 typedef struct vn_binary_op_s
126 enum tree_code opcode;
127 tree type;
128 tree op0;
129 tree op1;
130 hashval_t hashcode;
131 tree result;
132 } *vn_binary_op_t;
133 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
135 /* Unary operations in the hashtable consist of a single operand, an
136 opcode, and a type. Result is the value number of the operation,
137 and hashcode is stored to avoid having to calculate it repeatedly. */
139 typedef struct vn_unary_op_s
141 enum tree_code opcode;
142 tree type;
143 tree op0;
144 hashval_t hashcode;
145 tree result;
146 } *vn_unary_op_t;
147 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
149 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
150 arguments, and the basic block the phi is in. Result is the value
151 number of the operation, and hashcode is stored to avoid having to
152 calculate it repeatedly. Phi nodes not in the same block are never
153 considered equivalent. */
155 typedef struct vn_phi_s
157 VEC (tree, heap) *phiargs;
158 basic_block block;
159 hashval_t hashcode;
160 tree result;
161 } *vn_phi_t;
162 typedef const struct vn_phi_s *const_vn_phi_t;
164 /* Reference operands only exist in reference operations structures.
165 They consist of an opcode, type, and some number of operands. For
166 a given opcode, some, all, or none of the operands may be used.
167 The operands are there to store the information that makes up the
168 portion of the addressing calculation that opcode performs. */
170 typedef struct vn_reference_op_struct
172 enum tree_code opcode;
173 tree type;
174 tree op0;
175 tree op1;
176 tree op2;
177 } vn_reference_op_s;
178 typedef vn_reference_op_s *vn_reference_op_t;
179 typedef const vn_reference_op_s *const_vn_reference_op_t;
181 DEF_VEC_O(vn_reference_op_s);
182 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
184 /* A reference operation in the hashtable is representation as a
185 collection of vuses, representing the memory state at the time of
186 the operation, and a collection of operands that make up the
187 addressing calculation. If two vn_reference_t's have the same set
188 of operands, they access the same memory location. We also store
189 the resulting value number, and the hashcode. The vuses are
190 always stored in order sorted by ssa name version. */
192 typedef struct vn_reference_s
194 VEC (tree, gc) *vuses;
195 VEC (vn_reference_op_s, heap) *operands;
196 hashval_t hashcode;
197 tree result;
198 } *vn_reference_t;
199 typedef const struct vn_reference_s *const_vn_reference_t;
201 /* Valid hashtables storing information we have proven to be
202 correct. */
204 static vn_tables_t valid_info;
206 /* Optimistic hashtables storing information we are making assumptions about
207 during iterations. */
209 static vn_tables_t optimistic_info;
211 /* PRE hashtables storing information about mapping from expressions to
212 value handles. */
214 static vn_tables_t pre_info;
216 /* Pointer to the set of hashtables that is currently being used.
217 Should always point to either the optimistic_info, or the
218 valid_info. */
220 static vn_tables_t current_info;
223 /* Reverse post order index for each basic block. */
225 static int *rpo_numbers;
227 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
229 /* This represents the top of the VN lattice, which is the universal
230 value. */
232 tree VN_TOP;
234 /* Next DFS number and the stack for strongly connected component
235 detection. */
237 static unsigned int next_dfs_num;
238 static VEC (tree, heap) *sccstack;
240 DEF_VEC_P(vn_ssa_aux_t);
241 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
243 /* Table of vn_ssa_aux_t's, one per ssa_name. */
245 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
247 /* Return the value numbering information for a given SSA name. */
249 vn_ssa_aux_t
250 VN_INFO (tree name)
252 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name));
256 /* Set the value numbering info for a given SSA name to a given
257 value. */
259 static inline void
260 VN_INFO_SET (tree name, vn_ssa_aux_t value)
262 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
263 SSA_NAME_VERSION (name), value);
266 /* Get the value numbering info for a given SSA name, creating it if
267 it does not exist. */
269 vn_ssa_aux_t
270 VN_INFO_GET (tree name)
272 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
273 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
274 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
275 SSA_NAME_VERSION (name) + 1);
276 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
277 SSA_NAME_VERSION (name), newinfo);
278 return newinfo;
282 /* Compare two reference operands P1 and P2 for equality. return true if
283 they are equal, and false otherwise. */
285 static int
286 vn_reference_op_eq (const void *p1, const void *p2)
288 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
289 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
290 return vro1->opcode == vro2->opcode
291 && vro1->type == vro2->type
292 && expressions_equal_p (vro1->op0, vro2->op0)
293 && expressions_equal_p (vro1->op1, vro2->op1)
294 && expressions_equal_p (vro1->op2, vro2->op2);
297 /* Compute the hash for a reference operand VRO1 */
299 static hashval_t
300 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
302 return iterative_hash_expr (vro1->op0, vro1->opcode)
303 + iterative_hash_expr (vro1->op1, vro1->opcode)
304 + iterative_hash_expr (vro1->op2, vro1->opcode);
307 /* Return the hashcode for a given reference operation P1. */
309 static hashval_t
310 vn_reference_hash (const void *p1)
312 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
313 return vr1->hashcode;
316 /* Compute a hash for the reference operation VR1 and return it. */
318 static inline hashval_t
319 vn_reference_compute_hash (const vn_reference_t vr1)
321 hashval_t result = 0;
322 tree v;
323 int i;
324 vn_reference_op_t vro;
326 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
327 result += iterative_hash_expr (v, 0);
328 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
329 result += vn_reference_op_compute_hash (vro);
331 return result;
334 /* Return true if reference operations P1 and P2 are equivalent. This
335 means they have the same set of operands and vuses. */
337 static int
338 vn_reference_eq (const void *p1, const void *p2)
340 tree v;
341 int i;
342 vn_reference_op_t vro;
344 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
345 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
347 if (vr1->vuses == vr2->vuses
348 && vr1->operands == vr2->operands)
349 return true;
351 /* Impossible for them to be equivalent if they have different
352 number of vuses. */
353 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
354 return false;
356 /* We require that address operands be canonicalized in a way that
357 two memory references will have the same operands if they are
358 equivalent. */
359 if (VEC_length (vn_reference_op_s, vr1->operands)
360 != VEC_length (vn_reference_op_s, vr2->operands))
361 return false;
363 /* The memory state is more often different than the address of the
364 store/load, so check it first. */
365 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
367 if (VEC_index (tree, vr2->vuses, i) != v)
368 return false;
371 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
373 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
374 vro))
375 return false;
377 return true;
380 /* Place the vuses from STMT into *result */
382 static inline void
383 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
385 ssa_op_iter iter;
386 tree vuse;
388 if (!stmt)
389 return;
391 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
392 VEC_safe_push (tree, gc, *result, vuse);
394 if (VEC_length (tree, *result) > 1)
395 sort_vuses (*result);
399 /* Copy the VUSE names in STMT into a vector, and return
400 the vector. */
402 VEC (tree, gc) *
403 copy_vuses_from_stmt (tree stmt)
405 VEC (tree, gc) *vuses = NULL;
407 vuses_to_vec (stmt, &vuses);
409 return vuses;
412 /* Place the vdefs from STMT into *result */
414 static inline void
415 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
417 ssa_op_iter iter;
418 tree vdef;
420 if (!stmt)
421 return;
423 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
424 VEC_safe_push (tree, gc, *result, vdef);
426 if (VEC_length (tree, *result) > 1)
427 sort_vuses (*result);
430 /* Copy the names of vdef results in STMT into a vector, and return
431 the vector. */
433 static VEC (tree, gc) *
434 copy_vdefs_from_stmt (tree stmt)
436 VEC (tree, gc) *vdefs = NULL;
438 vdefs_to_vec (stmt, &vdefs);
440 return vdefs;
443 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
444 static VEC (tree, gc) *shared_lookup_vops;
446 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
447 This function will overwrite the current SHARED_LOOKUP_VOPS
448 variable. */
450 VEC (tree, gc) *
451 shared_vuses_from_stmt (tree stmt)
453 VEC_truncate (tree, shared_lookup_vops, 0);
454 vuses_to_vec (stmt, &shared_lookup_vops);
456 return shared_lookup_vops;
459 /* Copy the operations present in load/store/call REF into RESULT, a vector of
460 vn_reference_op_s's. */
462 static void
463 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
465 /* Calls are different from all other reference operations. */
466 if (TREE_CODE (ref) == CALL_EXPR)
468 vn_reference_op_s temp;
469 tree callfn;
470 call_expr_arg_iterator iter;
471 tree callarg;
473 /* Copy the call_expr opcode, type, function being called, and
474 arguments. */
475 memset (&temp, 0, sizeof (temp));
476 temp.type = TREE_TYPE (ref);
477 temp.opcode = CALL_EXPR;
478 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
480 callfn = get_callee_fndecl (ref);
481 if (!callfn)
482 callfn = CALL_EXPR_FN (ref);
483 temp.type = TREE_TYPE (callfn);
484 temp.opcode = TREE_CODE (callfn);
485 temp.op0 = callfn;
486 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
488 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
490 memset (&temp, 0, sizeof (temp));
491 temp.type = TREE_TYPE (callarg);
492 temp.opcode = TREE_CODE (callarg);
493 temp.op0 = callarg;
494 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
496 return;
499 /* For non-calls, store the information that makes up the address. */
501 while (ref)
503 vn_reference_op_s temp;
505 memset (&temp, 0, sizeof (temp));
506 temp.type = TREE_TYPE (ref);
507 temp.opcode = TREE_CODE (ref);
509 switch (temp.opcode)
511 case ALIGN_INDIRECT_REF:
512 case MISALIGNED_INDIRECT_REF:
513 case INDIRECT_REF:
514 /* The only operand is the address, which gets its own
515 vn_reference_op_s structure. */
516 break;
517 case BIT_FIELD_REF:
518 /* Record bits and position. */
519 temp.op0 = TREE_OPERAND (ref, 1);
520 temp.op1 = TREE_OPERAND (ref, 2);
521 break;
522 case COMPONENT_REF:
523 /* Record field as operand. */
524 temp.op0 = TREE_OPERAND (ref, 1);
525 break;
526 case ARRAY_RANGE_REF:
527 case ARRAY_REF:
528 /* Record index as operand. */
529 temp.op0 = TREE_OPERAND (ref, 1);
530 temp.op1 = TREE_OPERAND (ref, 3);
531 break;
532 case STRING_CST:
533 case INTEGER_CST:
534 case COMPLEX_CST:
535 case VECTOR_CST:
536 case REAL_CST:
537 case VALUE_HANDLE:
538 case VAR_DECL:
539 case PARM_DECL:
540 case CONST_DECL:
541 case RESULT_DECL:
542 case SSA_NAME:
543 temp.op0 = ref;
544 break;
545 /* These are only interesting for their operands, their
546 existence, and their type. They will never be the last
547 ref in the chain of references (IE they require an
548 operand), so we don't have to put anything
549 for op* as it will be handled by the iteration */
550 case IMAGPART_EXPR:
551 case REALPART_EXPR:
552 case VIEW_CONVERT_EXPR:
553 case ADDR_EXPR:
554 break;
555 default:
556 gcc_unreachable ();
559 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
561 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
562 ref = TREE_OPERAND (ref, 0);
563 else
564 ref = NULL_TREE;
568 /* Create a vector of vn_reference_op_s structures from REF, a
569 REFERENCE_CLASS_P tree. The vector is not shared. */
571 static VEC(vn_reference_op_s, heap) *
572 create_reference_ops_from_ref (tree ref)
574 VEC (vn_reference_op_s, heap) *result = NULL;
576 copy_reference_ops_from_ref (ref, &result);
577 return result;
580 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
582 /* Create a vector of vn_reference_op_s structures from REF, a
583 REFERENCE_CLASS_P tree. The vector is shared among all callers of
584 this function. */
586 static VEC(vn_reference_op_s, heap) *
587 shared_reference_ops_from_ref (tree ref)
589 if (!ref)
590 return NULL;
591 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
592 copy_reference_ops_from_ref (ref, &shared_lookup_references);
593 return shared_lookup_references;
597 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
598 structures into their value numbers. This is done in-place, and
599 the vector passed in is returned. */
601 static VEC (vn_reference_op_s, heap) *
602 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
604 vn_reference_op_t vro;
605 int i;
607 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
609 if (vro->opcode == SSA_NAME
610 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
611 vro->op0 = SSA_VAL (vro->op0);
614 return orig;
617 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
618 their value numbers. This is done in-place, and the vector passed
619 in is returned. */
621 static VEC (tree, gc) *
622 valueize_vuses (VEC (tree, gc) *orig)
624 bool made_replacement = false;
625 tree vuse;
626 int i;
628 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
630 if (vuse != SSA_VAL (vuse))
632 made_replacement = true;
633 VEC_replace (tree, orig, i, SSA_VAL (vuse));
637 if (made_replacement && VEC_length (tree, orig) > 1)
638 sort_vuses (orig);
640 return orig;
643 /* Lookup OP in the current hash table, and return the resulting
644 value number if it exists in the hash table. Return NULL_TREE if
645 it does not exist in the hash table. */
647 tree
648 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
650 void **slot;
651 struct vn_reference_s vr1;
653 vr1.vuses = valueize_vuses (vuses);
654 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
655 vr1.hashcode = vn_reference_compute_hash (&vr1);
656 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
657 NO_INSERT);
658 if (!slot)
659 return NULL_TREE;
661 return ((vn_reference_t)*slot)->result;
664 /* Insert OP into the current hash table with a value number of
665 RESULT. */
667 void
668 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
670 void **slot;
671 vn_reference_t vr1;
673 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
675 vr1->vuses = valueize_vuses (vuses);
676 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
677 vr1->hashcode = vn_reference_compute_hash (vr1);
678 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
680 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
681 INSERT);
683 /* Because we lookup stores using vuses, and value number failures
684 using the vdefs (see visit_reference_op_store for how and why),
685 it's possible that on failure we may try to insert an already
686 inserted store. This is not wrong, there is no ssa name for a
687 store that we could use as a differentiator anyway. Thus, unlike
688 the other lookup functions, you cannot gcc_assert (!*slot)
689 here. */
692 *slot = vr1;
696 /* Return the stored hashcode for a unary operation. */
698 static hashval_t
699 vn_unary_op_hash (const void *p1)
701 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
702 return vuo1->hashcode;
705 /* Hash a unary operation P1 and return the result. */
707 static inline hashval_t
708 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
710 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
713 /* Return true if P1 and P2, two unary operations, are equivalent. */
715 static int
716 vn_unary_op_eq (const void *p1, const void *p2)
718 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
719 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
720 return vuo1->opcode == vuo2->opcode
721 && vuo1->type == vuo2->type
722 && expressions_equal_p (vuo1->op0, vuo2->op0);
725 /* Lookup OP in the current hash table, and return the resulting
726 value number if it exists in the hash table. Return NULL_TREE if
727 it does not exist in the hash table. */
729 tree
730 vn_unary_op_lookup (tree op)
732 void **slot;
733 struct vn_unary_op_s vuo1;
735 vuo1.opcode = TREE_CODE (op);
736 vuo1.type = TREE_TYPE (op);
737 vuo1.op0 = TREE_OPERAND (op, 0);
739 if (TREE_CODE (vuo1.op0) == SSA_NAME)
740 vuo1.op0 = SSA_VAL (vuo1.op0);
742 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
743 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
744 NO_INSERT);
745 if (!slot)
746 return NULL_TREE;
747 return ((vn_unary_op_t)*slot)->result;
750 /* Insert OP into the current hash table with a value number of
751 RESULT. */
753 void
754 vn_unary_op_insert (tree op, tree result)
756 void **slot;
757 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
759 vuo1->opcode = TREE_CODE (op);
760 vuo1->type = TREE_TYPE (op);
761 vuo1->op0 = TREE_OPERAND (op, 0);
762 vuo1->result = result;
764 if (TREE_CODE (vuo1->op0) == SSA_NAME)
765 vuo1->op0 = SSA_VAL (vuo1->op0);
767 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
768 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
769 INSERT);
770 gcc_assert (!*slot);
771 *slot = vuo1;
774 /* Compute and return the hash value for binary operation VBO1. */
776 static inline hashval_t
777 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
779 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
780 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
783 /* Return the computed hashcode for binary operation P1. */
785 static hashval_t
786 vn_binary_op_hash (const void *p1)
788 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
789 return vbo1->hashcode;
792 /* Compare binary operations P1 and P2 and return true if they are
793 equivalent. */
795 static int
796 vn_binary_op_eq (const void *p1, const void *p2)
798 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
799 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
800 return vbo1->opcode == vbo2->opcode
801 && vbo1->type == vbo2->type
802 && expressions_equal_p (vbo1->op0, vbo2->op0)
803 && expressions_equal_p (vbo1->op1, vbo2->op1);
806 /* Lookup OP in the current hash table, and return the resulting
807 value number if it exists in the hash table. Return NULL_TREE if
808 it does not exist in the hash table. */
810 tree
811 vn_binary_op_lookup (tree op)
813 void **slot;
814 struct vn_binary_op_s vbo1;
816 vbo1.opcode = TREE_CODE (op);
817 vbo1.type = TREE_TYPE (op);
818 vbo1.op0 = TREE_OPERAND (op, 0);
819 vbo1.op1 = TREE_OPERAND (op, 1);
821 if (TREE_CODE (vbo1.op0) == SSA_NAME)
822 vbo1.op0 = SSA_VAL (vbo1.op0);
823 if (TREE_CODE (vbo1.op1) == SSA_NAME)
824 vbo1.op1 = SSA_VAL (vbo1.op1);
826 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
827 && commutative_tree_code (vbo1.opcode))
829 tree temp = vbo1.op0;
830 vbo1.op0 = vbo1.op1;
831 vbo1.op1 = temp;
834 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
835 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
836 NO_INSERT);
837 if (!slot)
838 return NULL_TREE;
839 return ((vn_binary_op_t)*slot)->result;
842 /* Insert OP into the current hash table with a value number of
843 RESULT. */
845 void
846 vn_binary_op_insert (tree op, tree result)
848 void **slot;
849 vn_binary_op_t vbo1;
850 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
852 vbo1->opcode = TREE_CODE (op);
853 vbo1->type = TREE_TYPE (op);
854 vbo1->op0 = TREE_OPERAND (op, 0);
855 vbo1->op1 = TREE_OPERAND (op, 1);
856 vbo1->result = result;
858 if (TREE_CODE (vbo1->op0) == SSA_NAME)
859 vbo1->op0 = SSA_VAL (vbo1->op0);
860 if (TREE_CODE (vbo1->op1) == SSA_NAME)
861 vbo1->op1 = SSA_VAL (vbo1->op1);
863 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
864 && commutative_tree_code (vbo1->opcode))
866 tree temp = vbo1->op0;
867 vbo1->op0 = vbo1->op1;
868 vbo1->op1 = temp;
870 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
871 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
872 INSERT);
873 gcc_assert (!*slot);
875 *slot = vbo1;
878 /* Compute a hashcode for PHI operation VP1 and return it. */
880 static inline hashval_t
881 vn_phi_compute_hash (vn_phi_t vp1)
883 hashval_t result = 0;
884 int i;
885 tree phi1op;
887 result = vp1->block->index;
889 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
891 if (phi1op == VN_TOP)
892 continue;
893 result += iterative_hash_expr (phi1op, result);
896 return result;
899 /* Return the computed hashcode for phi operation P1. */
901 static hashval_t
902 vn_phi_hash (const void *p1)
904 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
905 return vp1->hashcode;
908 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
910 static int
911 vn_phi_eq (const void *p1, const void *p2)
913 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
914 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
916 if (vp1->block == vp2->block)
918 int i;
919 tree phi1op;
921 /* Any phi in the same block will have it's arguments in the
922 same edge order, because of how we store phi nodes. */
923 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
925 tree phi2op = VEC_index (tree, vp2->phiargs, i);
926 if (phi1op == VN_TOP || phi2op == VN_TOP)
927 continue;
928 if (!expressions_equal_p (phi1op, phi2op))
929 return false;
931 return true;
933 return false;
936 static VEC(tree, heap) *shared_lookup_phiargs;
938 /* Lookup PHI in the current hash table, and return the resulting
939 value number if it exists in the hash table. Return NULL_TREE if
940 it does not exist in the hash table. */
942 static tree
943 vn_phi_lookup (tree phi)
945 void **slot;
946 struct vn_phi_s vp1;
947 int i;
949 VEC_truncate (tree, shared_lookup_phiargs, 0);
951 /* Canonicalize the SSA_NAME's to their value number. */
952 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
954 tree def = PHI_ARG_DEF (phi, i);
955 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
956 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
958 vp1.phiargs = shared_lookup_phiargs;
959 vp1.block = bb_for_stmt (phi);
960 vp1.hashcode = vn_phi_compute_hash (&vp1);
961 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
962 NO_INSERT);
963 if (!slot)
964 return NULL_TREE;
965 return ((vn_phi_t)*slot)->result;
968 /* Insert PHI into the current hash table with a value number of
969 RESULT. */
971 static void
972 vn_phi_insert (tree phi, tree result)
974 void **slot;
975 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
976 int i;
977 VEC (tree, heap) *args = NULL;
979 /* Canonicalize the SSA_NAME's to their value number. */
980 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
982 tree def = PHI_ARG_DEF (phi, i);
983 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
984 VEC_safe_push (tree, heap, args, def);
986 vp1->phiargs = args;
987 vp1->block = bb_for_stmt (phi);
988 vp1->result = result;
989 vp1->hashcode = vn_phi_compute_hash (vp1);
991 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
992 INSERT);
994 /* Because we iterate over phi operations more than once, it's
995 possible the slot might already exist here, hence no assert.*/
996 *slot = vp1;
1000 /* Print set of components in strongly connected component SCC to OUT. */
1002 static void
1003 print_scc (FILE *out, VEC (tree, heap) *scc)
1005 tree var;
1006 unsigned int i;
1008 fprintf (out, "SCC consists of: ");
1009 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1011 print_generic_expr (out, var, 0);
1012 fprintf (out, " ");
1014 fprintf (out, "\n");
1017 /* Set the value number of FROM to TO, return true if it has changed
1018 as a result. */
1020 static inline bool
1021 set_ssa_val_to (tree from, tree to)
1023 tree currval;
1025 if (from != to
1026 && TREE_CODE (to) == SSA_NAME
1027 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1028 to = from;
1030 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1031 and invariants. So assert that here. */
1032 gcc_assert (to != NULL_TREE
1033 && (to == VN_TOP
1034 || TREE_CODE (to) == SSA_NAME
1035 || is_gimple_min_invariant (to)));
1037 if (dump_file && (dump_flags & TDF_DETAILS))
1039 fprintf (dump_file, "Setting value number of ");
1040 print_generic_expr (dump_file, from, 0);
1041 fprintf (dump_file, " to ");
1042 print_generic_expr (dump_file, to, 0);
1043 fprintf (dump_file, "\n");
1046 currval = SSA_VAL (from);
1048 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1050 SSA_VAL (from) = to;
1051 return true;
1053 return false;
1056 /* Set all definitions in STMT to value number to themselves.
1057 Return true if a value number changed. */
1059 static bool
1060 defs_to_varying (tree stmt)
1062 bool changed = false;
1063 ssa_op_iter iter;
1064 def_operand_p defp;
1066 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1068 tree def = DEF_FROM_PTR (defp);
1070 VN_INFO (def)->use_processed = true;
1071 changed |= set_ssa_val_to (def, def);
1073 return changed;
1076 /* Visit a copy between LHS and RHS, return true if the value number
1077 changed. */
1079 static bool
1080 visit_copy (tree lhs, tree rhs)
1083 /* Follow chains of copies to their destination. */
1084 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1085 rhs = SSA_VAL (rhs);
1087 /* The copy may have a more interesting constant filled expression
1088 (we don't, since we know our RHS is just an SSA name). */
1089 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1090 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1092 return set_ssa_val_to (lhs, rhs);
1095 /* Visit a unary operator RHS, value number it, and return true if the
1096 value number of LHS has changed as a result. */
1098 static bool
1099 visit_unary_op (tree lhs, tree op)
1101 bool changed = false;
1102 tree result = vn_unary_op_lookup (op);
1104 if (result)
1106 changed = set_ssa_val_to (lhs, result);
1108 else
1110 changed = set_ssa_val_to (lhs, lhs);
1111 vn_unary_op_insert (op, lhs);
1114 return changed;
1117 /* Visit a binary operator RHS, value number it, and return true if the
1118 value number of LHS has changed as a result. */
1120 static bool
1121 visit_binary_op (tree lhs, tree op)
1123 bool changed = false;
1124 tree result = vn_binary_op_lookup (op);
1126 if (result)
1128 changed = set_ssa_val_to (lhs, result);
1130 else
1132 changed = set_ssa_val_to (lhs, lhs);
1133 vn_binary_op_insert (op, lhs);
1136 return changed;
1139 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1140 and return true if the value number of the LHS has changed as a result. */
1142 static bool
1143 visit_reference_op_load (tree lhs, tree op, tree stmt)
1145 bool changed = false;
1146 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1148 if (result)
1150 changed = set_ssa_val_to (lhs, result);
1152 else
1154 changed = set_ssa_val_to (lhs, lhs);
1155 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1158 return changed;
1162 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1163 and return true if the value number of the LHS has changed as a result. */
1165 static bool
1166 visit_reference_op_store (tree lhs, tree op, tree stmt)
1168 bool changed = false;
1169 tree result;
1170 bool resultsame = false;
1172 /* First we want to lookup using the *vuses* from the store and see
1173 if there the last store to this location with the same address
1174 had the same value.
1176 The vuses represent the memory state before the store. If the
1177 memory state, address, and value of the store is the same as the
1178 last store to this location, then this store will produce the
1179 same memory state as that store.
1181 In this case the vdef versions for this store are value numbered to those
1182 vuse versions, since they represent the same memory state after
1183 this store.
1185 Otherwise, the vdefs for the store are used when inserting into
1186 the table, since the store generates a new memory state. */
1188 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1190 if (result)
/* The lookup may return an SSA name; compare against its current
   value number rather than the name itself.  */
1192 if (TREE_CODE (result) == SSA_NAME)
1193 result = SSA_VAL (result);
1194 resultsame = expressions_equal_p (result, op);
/* No prior matching store: this store creates a fresh memory state.  */
1197 if (!result || !resultsame)
1199 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1200 int i;
1201 tree vdef;
1203 if (dump_file && (dump_flags & TDF_DETAILS))
1205 fprintf (dump_file, "No store match\n");
1206 fprintf (dump_file, "Value numbering store ");
1207 print_generic_expr (dump_file, lhs, 0);
1208 fprintf (dump_file, " to ");
1209 print_generic_expr (dump_file, op, 0);
1210 fprintf (dump_file, "\n");
1212 /* Have to set value numbers before insert, since insert is
1213 going to valueize the references in-place. */
1214 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1216 VN_INFO (vdef)->use_processed = true;
1217 changed |= set_ssa_val_to (vdef, vdef);
1220 vn_reference_insert (lhs, op, vdefs);
1222 else
1224 /* We had a match, so value number the vdefs to have the value
1225 number of the vuses they came from. */
1226 ssa_op_iter op_iter;
1227 def_operand_p var;
1228 vuse_vec_p vv;
1230 if (dump_file && (dump_flags & TDF_DETAILS))
1231 fprintf (dump_file, "Store matched earlier value,"
1232 "value numbering store vdefs to matching vuses.\n");
1234 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1236 tree def = DEF_FROM_PTR (var);
1237 tree use;
1239 /* Uh, if the vuse is a multiuse, we can't really do much
1240 here, sadly, since we don't know which value number of
1241 which vuse to use. */
1242 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1243 use = def;
1244 else
1245 use = VUSE_ELEMENT_VAR (*vv, 0);
1247 VN_INFO (def)->use_processed = true;
1248 changed |= set_ssa_val_to (def, SSA_VAL (use));
1252 return changed;
1255 /* Visit and value number PHI, return true if the value number
1256 changed. */
1258 static bool
1259 visit_phi (tree phi)
1261 bool changed = false;
1262 tree result;
1263 tree sameval = VN_TOP;
1264 bool allsame = true;
1265 int i;
1267 /* TODO: We could check for this in init_sccvn, and replace this
1268 with a gcc_assert. */
/* Abnormal PHIs must not be value-numbered with anything else; give
   the result its own value number and stop.  */
1269 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1270 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1272 /* See if all non-TOP arguments have the same value. TOP is
1273 equivalent to everything, so we can ignore it. */
1274 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1276 tree def = PHI_ARG_DEF (phi, i);
1278 if (TREE_CODE (def) == SSA_NAME)
1279 def = SSA_VAL (def);
1280 if (def == VN_TOP)
1281 continue;
1282 if (sameval == VN_TOP)
1284 sameval = def;
1286 else
1288 if (!expressions_equal_p (def, sameval))
1290 allsame = false;
1291 break;
1296 /* If all value numbered to the same value, the phi node has that
1297 value. */
1298 if (allsame)
1300 if (is_gimple_min_invariant (sameval))
1302 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1303 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1305 else
1307 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1308 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* A single SSA-name value goes through visit_copy so the source's
   expression information is propagated too.  */
1311 if (TREE_CODE (sameval) == SSA_NAME)
1312 return visit_copy (PHI_RESULT (phi), sameval);
1314 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1317 /* Otherwise, see if it is equivalent to a phi node in this block. */
1318 result = vn_phi_lookup (phi);
1319 if (result)
1321 if (TREE_CODE (result) == SSA_NAME)
1322 changed = visit_copy (PHI_RESULT (phi), result);
1323 else
1324 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1326 else
/* No congruent PHI yet: record this one and let its result be its
   own value number.  */
1328 vn_phi_insert (phi, PHI_RESULT (phi));
1329 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1330 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1331 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1334 return changed;
1337 /* Return true if EXPR contains constants. */
1339 static bool
1340 expr_has_constants (tree expr)
1342 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1344 case tcc_unary:
1345 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1347 case tcc_binary:
1348 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1349 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1350 /* Constants inside reference ops are rarely interesting, but
1351 it can take a lot of looking to find them. */
1352 case tcc_reference:
1353 case tcc_declaration:
1354 return false;
1355 default:
1356 return is_gimple_min_invariant (expr);
1358 return false;
1361 /* Replace SSA_NAMES in expr with their value numbers, and return the
1362 result.
1363 This is performed in place. */
1365 static tree
1366 valueize_expr (tree expr)
1368 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1370 case tcc_unary:
1371 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1372 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1373 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1374 break;
1375 case tcc_binary:
1376 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1377 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1378 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1379 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1380 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1381 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1382 break;
1383 default:
1384 break;
1386 return expr;
1389 /* Simplify the binary expression RHS, and return the result if
1390 simplified. */
1392 static tree
1393 simplify_binary_expression (tree stmt, tree rhs)
1395 tree result = NULL_TREE;
1396 tree op0 = TREE_OPERAND (rhs, 0);
1397 tree op1 = TREE_OPERAND (rhs, 1);
1399 /* This will not catch every single case we could combine, but will
1400 catch those with constants. The goal here is to simultaneously
1401 combine constants between expressions, but avoid infinite
1402 expansion of expressions during simplification. */
1403 if (TREE_CODE (op0) == SSA_NAME)
1405 if (VN_INFO (op0)->has_constants)
1406 op0 = valueize_expr (VN_INFO (op0)->expr)
1407 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1408 op0 = SSA_VAL (op0);
1411 if (TREE_CODE (op1) == SSA_NAME)
1413 if (VN_INFO (op1)->has_constants)
1414 op1 = valueize_expr (VN_INFO (op1)->expr);
1415 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1416 op1 = SSA_VAL (op1);
1419 /* Avoid folding if nothing changed. */
1420 if (op0 == TREE_OPERAND (rhs, 0)
1421 && op1 == TREE_OPERAND (rhs, 1))
1422 return NULL_TREE;
/* Defer overflow warnings so they are only emitted when the folded
   result is actually valid and kept.  */
1424 fold_defer_overflow_warnings ();
1426 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1428 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1429 stmt, 0);
1431 /* Make sure result is not a complex expression consisting
1432 of operators of operators (IE (a + b) + (a + c))
1433 Otherwise, we will end up with unbounded expressions if
1434 fold does anything at all. */
1435 if (result && valid_gimple_expression_p (result))
1436 return result;
/* NULL_TREE signals "no usable simplification" to the caller.  */
1438 return NULL_TREE;
1441 /* Simplify the unary expression RHS, and return the result if
1442 simplified. */
1444 static tree
1445 simplify_unary_expression (tree rhs)
1447 tree result = NULL_TREE;
1448 tree op0 = TREE_OPERAND (rhs, 0);
1450 if (TREE_CODE (op0) != SSA_NAME)
1451 return NULL_TREE;
1453 if (VN_INFO (op0)->has_constants)
1454 op0 = valueize_expr (VN_INFO (op0)->expr);
1455 else if (TREE_CODE (rhs) == NOP_EXPR
1456 || TREE_CODE (rhs) == CONVERT_EXPR
1457 || TREE_CODE (rhs) == REALPART_EXPR
1458 || TREE_CODE (rhs) == IMAGPART_EXPR)
1460 /* We want to do tree-combining on conversion-like expressions.
1461 Make sure we feed only SSA_NAMEs or constants to fold though. */
1462 tree tem = valueize_expr (VN_INFO (op0)->expr);
/* NOTE(review): the test below also accepts unary/binary class trees,
   which the comment above does not mention — confirm this wider set
   is intended before tightening it.  */
1463 if (UNARY_CLASS_P (tem)
1464 || BINARY_CLASS_P (tem)
1465 || TREE_CODE (tem) == SSA_NAME
1466 || is_gimple_min_invariant (tem))
1467 op0 = tem;
1470 /* Avoid folding if nothing changed, but remember the expression. */
/* Note: unlike simplify_binary_expression, this function returns RHS
   itself (not NULL_TREE) when no simplification happens.  */
1471 if (op0 == TREE_OPERAND (rhs, 0))
1472 return rhs;
1474 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1475 if (result)
1477 STRIP_USELESS_TYPE_CONVERSION (result);
1478 if (valid_gimple_expression_p (result))
1479 return result;
1482 return rhs;
1485 /* Try to simplify RHS using equivalences and constant folding. */
1487 static tree
1488 try_to_simplify (tree stmt, tree rhs)
1490 if (TREE_CODE (rhs) == SSA_NAME)
/* An SSA name simplifies to its value number when that is a
   constant, or to its recorded constant-bearing expression.  */
1492 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1493 return SSA_VAL (rhs);
1494 else if (VN_INFO (rhs)->has_constants)
1495 return VN_INFO (rhs)->expr;
1497 else
1499 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1501 /* For references, see if we find a result for the lookup,
1502 and use it if we do. */
1503 case tcc_declaration:
1504 /* Pull out any truly constant values. */
1505 if (TREE_READONLY (rhs)
1506 && TREE_STATIC (rhs)
1507 && DECL_INITIAL (rhs)
1508 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1509 return DECL_INITIAL (rhs);
1511 /* Fallthrough. */
1512 case tcc_reference:
1514 tree result = vn_reference_lookup (rhs,
1515 shared_vuses_from_stmt (stmt));
1516 if (result)
1517 return result;
1519 /* Fallthrough for some codes. */
/* Only REALPART/IMAGPART references continue into the unary
   simplifier; all other references stop here.  */
1520 if (!(TREE_CODE (rhs) == REALPART_EXPR
1521 || TREE_CODE (rhs) == IMAGPART_EXPR))
1522 break;
1523 /* We could do a little more with unary ops, if they expand
1524 into binary ops, but it's debatable whether it is worth it. */
1525 case tcc_unary:
1526 return simplify_unary_expression (rhs);
1527 break;
1528 case tcc_comparison:
1529 case tcc_binary:
1530 return simplify_binary_expression (stmt, rhs);
1531 break;
1532 default:
1533 break;
/* Returning RHS unchanged means "no simplification found".  */
1536 return rhs;
1539 /* Visit and value number USE, return true if the value number
1540 changed. */
1542 static bool
1543 visit_use (tree use)
1545 bool changed = false;
1546 tree stmt = SSA_NAME_DEF_STMT (use);
1547 stmt_ann_t ann;
1549 VN_INFO (use)->use_processed = true;
1551 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1552 if (dump_file && (dump_flags & TDF_DETAILS))
1554 fprintf (dump_file, "Value numbering ");
1555 print_generic_expr (dump_file, use, 0);
1556 fprintf (dump_file, " stmt = ");
1557 print_generic_stmt (dump_file, stmt, 0);
1560 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1561 if (TREE_CODE (stmt) == RETURN_EXPR
1562 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1563 stmt = TREE_OPERAND (stmt, 0);
1565 ann = stmt_ann (stmt);
1567 /* Handle uninitialized uses. */
1568 if (IS_EMPTY_STMT (stmt))
1570 changed = set_ssa_val_to (use, use);
1572 else
1574 if (TREE_CODE (stmt) == PHI_NODE)
1576 changed = visit_phi (stmt);
/* Anything that is not a plain assignment (or has volatile
   operands) cannot be value numbered: make its defs varying.  */
1578 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1579 || (ann && ann->has_volatile_ops))
1581 changed = defs_to_varying (stmt);
1583 else
1585 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1586 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1587 tree simplified;
1589 STRIP_USELESS_TYPE_CONVERSION (rhs);
1591 /* Shortcut for copies. Simplifying copies is pointless,
1592 since we copy the expression and value they represent. */
1593 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1595 changed = visit_copy (lhs, rhs);
1596 goto done;
1598 simplified = try_to_simplify (stmt, rhs);
1599 if (simplified && simplified != rhs)
1601 if (dump_file && (dump_flags & TDF_DETAILS))
1603 fprintf (dump_file, "RHS ");
1604 print_generic_expr (dump_file, rhs, 0);
1605 fprintf (dump_file, " simplified to ");
1606 print_generic_expr (dump_file, simplified, 0);
1607 if (TREE_CODE (lhs) == SSA_NAME)
1608 fprintf (dump_file, " has constants %d\n",
1609 VN_INFO (lhs)->has_constants);
1610 else
1611 fprintf (dump_file, "\n");
1615 /* Setting value numbers to constants will occasionally
1616 screw up phi congruence because constants are not
1617 uniquely associated with a single ssa name that can be
1618 looked up. */
1619 if (simplified && is_gimple_min_invariant (simplified)
1620 && TREE_CODE (lhs) == SSA_NAME
1621 && simplified != rhs)
1623 VN_INFO (lhs)->expr = simplified;
1624 VN_INFO (lhs)->has_constants = true;
1625 changed = set_ssa_val_to (lhs, simplified);
1626 goto done;
1628 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1629 && TREE_CODE (lhs) == SSA_NAME)
1631 changed = visit_copy (lhs, simplified);
1632 goto done;
1634 else if (simplified)
1636 if (TREE_CODE (lhs) == SSA_NAME)
1638 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1639 /* We have to unshare the expression or else
1640 valuizing may change the IL stream. */
1641 VN_INFO (lhs)->expr = unshare_expr (simplified);
/* From here on the simplified form replaces the original RHS
   for the classification below.  */
1643 rhs = simplified;
1645 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1647 VN_INFO (lhs)->has_constants = true;
1648 VN_INFO (lhs)->expr = unshare_expr (rhs);
1650 else if (TREE_CODE (lhs) == SSA_NAME)
1652 /* We reset expr and constantness here because we may
1653 have been value numbering optimistically, and
1654 iterating. They may become non-constant in this case,
1655 even if they were optimistically constant. */
1657 VN_INFO (lhs)->has_constants = false;
1658 VN_INFO (lhs)->expr = lhs;
/* Dispatch on the shape of the LHS, then on the class of the RHS.  */
1661 if (TREE_CODE (lhs) == SSA_NAME
1662 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1663 changed = defs_to_varying (stmt);
1664 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1666 changed = visit_reference_op_store (lhs, rhs, stmt);
1668 else if (TREE_CODE (lhs) == SSA_NAME)
1670 if (is_gimple_min_invariant (rhs))
1672 VN_INFO (lhs)->has_constants = true;
1673 VN_INFO (lhs)->expr = rhs;
1674 changed = set_ssa_val_to (lhs, rhs);
1676 else
1678 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1680 case tcc_unary:
1681 changed = visit_unary_op (lhs, rhs);
1682 break;
1683 case tcc_binary:
1684 changed = visit_binary_op (lhs, rhs);
1685 break;
1686 /* If tcc_vl_expr ever encompasses more than
1687 CALL_EXPR, this will need to be changed. */
1688 case tcc_vl_exp:
/* Only pure/const calls can be treated like loads; other
   calls may have arbitrary side effects.  */
1689 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1690 changed = visit_reference_op_load (lhs, rhs, stmt);
1691 else
1692 changed = defs_to_varying (stmt);
1693 break;
1694 case tcc_declaration:
1695 case tcc_reference:
1696 changed = visit_reference_op_load (lhs, rhs, stmt);
1697 break;
1698 case tcc_expression:
1699 if (TREE_CODE (rhs) == ADDR_EXPR)
1701 changed = visit_unary_op (lhs, rhs);
1702 goto done;
1704 /* Fallthrough. */
1705 default:
1706 changed = defs_to_varying (stmt);
1707 break;
1711 else
1712 changed = defs_to_varying (stmt);
1715 done:
1716 return changed;
1719 /* Compare two operands by reverse postorder index */
1721 static int
1722 compare_ops (const void *pa, const void *pb)
1724 const tree opa = *((const tree *)pa);
1725 const tree opb = *((const tree *)pb);
1726 tree opstmta = SSA_NAME_DEF_STMT (opa);
1727 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1728 basic_block bba;
1729 basic_block bbb;
1731 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1732 return 0;
1733 else if (IS_EMPTY_STMT (opstmta))
1734 return -1;
1735 else if (IS_EMPTY_STMT (opstmtb))
1736 return 1;
1738 bba = bb_for_stmt (opstmta);
1739 bbb = bb_for_stmt (opstmtb);
1741 if (!bba && !bbb)
1742 return 0;
1743 else if (!bba)
1744 return -1;
1745 else if (!bbb)
1746 return 1;
1748 if (bba == bbb)
1750 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1751 return 0;
1752 else if (TREE_CODE (opstmta) == PHI_NODE)
1753 return -1;
1754 else if (TREE_CODE (opstmtb) == PHI_NODE)
1755 return 1;
1756 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1758 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1761 /* Sort an array containing members of a strongly connected component
1762 SCC so that the members are ordered by RPO number.
1763 This means that when the sort is complete, iterating through the
1764 array will give you the members in RPO order. */
1766 static void
1767 sort_scc (VEC (tree, heap) *scc)
1769 qsort (VEC_address (tree, scc),
1770 VEC_length (tree, scc),
1771 sizeof (tree),
1772 compare_ops);
1775 /* Process a strongly connected component in the SSA graph. */
1777 static void
1778 process_scc (VEC (tree, heap) *scc)
1780 /* If the SCC has a single member, just visit it. */
1782 if (VEC_length (tree, scc) == 1)
1784 tree use = VEC_index (tree, scc, 0);
1785 if (!VN_INFO (use)->use_processed)
1786 visit_use (use);
1788 else
1790 tree var;
1791 unsigned int i;
1792 unsigned int iterations = 0;
1793 bool changed = true;
1795 /* Iterate over the SCC with the optimistic table until it stops
1796 changing. */
1797 current_info = optimistic_info;
1798 while (changed)
1800 changed = false;
1801 iterations++;
1802 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1803 changed |= visit_use (var);
1806 if (dump_file && (dump_flags & TDF_STATS))
1807 fprintf (dump_file, "Processing SCC required %d iterations\n",
1808 iterations);
1810 /* Finally, visit the SCC once using the valid table. */
1811 current_info = valid_info;
1812 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1813 visit_use (var);
1817 /* Depth first search on NAME to discover and process SCC's in the SSA
1818 graph.
1819 Execution of this algorithm relies on the fact that the SCC's are
1820 popped off the stack in topological order. */
1822 static void
1823 DFS (tree name)
1825 ssa_op_iter iter;
1826 use_operand_p usep;
1827 tree defstmt;
1829 /* SCC info */
/* Tarjan-style bookkeeping: assign a DFS number and initialize the
   low-link to it, then push NAME on the SCC stack.  */
1830 VN_INFO (name)->dfsnum = next_dfs_num++;
1831 VN_INFO (name)->visited = true;
1832 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1834 VEC_safe_push (tree, heap, sccstack, name);
1835 VN_INFO (name)->on_sccstack = true;
1836 defstmt = SSA_NAME_DEF_STMT (name);
1838 /* Recursively DFS on our operands, looking for SCC's. */
1839 if (!IS_EMPTY_STMT (defstmt))
1841 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1842 SSA_OP_ALL_USES)
1844 tree use = USE_FROM_PTR (usep);
1846 /* Since we handle phi nodes, we will sometimes get
1847 invariants in the use expression. */
1848 if (TREE_CODE (use) != SSA_NAME)
1849 continue;
1851 if (! (VN_INFO (use)->visited))
/* Tree edge: recurse, then absorb the child's low-link.  */
1853 DFS (use);
1854 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1855 VN_INFO (use)->low);
/* Back/cross edge to a node still on the stack: update low-link
   from its DFS number.  */
1857 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1858 && VN_INFO (use)->on_sccstack)
1860 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1861 VN_INFO (name)->low);
1866 /* See if we found an SCC. */
/* NAME is an SCC root exactly when its low-link equals its own DFS
   number.  */
1867 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1869 VEC (tree, heap) *scc = NULL;
1870 tree x;
1872 /* Found an SCC, pop the components off the SCC stack and
1873 process them. */
1876 x = VEC_pop (tree, sccstack);
1878 VN_INFO (x)->on_sccstack = false;
1879 VEC_safe_push (tree, heap, scc, x);
1880 } while (x != name);
1882 if (VEC_length (tree, scc) > 1)
1883 sort_scc (scc);
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1886 print_scc (dump_file, scc);
1888 process_scc (scc);
1890 VEC_free (tree, heap, scc);
1894 static void
1895 free_phi (void *vp)
1897 vn_phi_t phi = vp;
1898 VEC_free (tree, heap, phi->phiargs);
1902 /* Free a reference operation structure VP. */
1904 static void
1905 free_reference (void *vp)
1907 vn_reference_t vr = vp;
1908 VEC_free (vn_reference_op_s, heap, vr->operands);
1911 /* Allocate a value number table. */
1913 static void
1914 allocate_vn_table (vn_tables_t table)
1916 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1917 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1918 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1919 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1920 free_reference);
1922 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1923 sizeof (struct vn_unary_op_s),
1924 30);
1925 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1926 sizeof (struct vn_binary_op_s),
1927 30);
1928 table->phis_pool = create_alloc_pool ("VN phis",
1929 sizeof (struct vn_phi_s),
1930 30);
1931 table->references_pool = create_alloc_pool ("VN references",
1932 sizeof (struct vn_reference_s),
1933 30);
1936 /* Free a value number table. */
1938 static void
1939 free_vn_table (vn_tables_t table)
1941 htab_delete (table->phis);
1942 htab_delete (table->unary);
1943 htab_delete (table->binary);
1944 htab_delete (table->references);
1945 free_alloc_pool (table->unary_op_pool);
1946 free_alloc_pool (table->binary_op_pool);
1947 free_alloc_pool (table->phis_pool);
1948 free_alloc_pool (table->references_pool);
/* Initialize all state used by SCC value numbering: per-SSA-name
   VN_INFO entries, RPO numbers for basic blocks, statement uids, and
   the valid/optimistic hash tables.  */
1951 static void
1952 init_scc_vn (void)
1954 size_t i;
1955 int j;
1956 int *rpo_numbers_temp;
1957 basic_block bb;
1958 size_t id = 0;
1960 calculate_dominance_info (CDI_DOMINATORS);
1961 sccstack = NULL;
1962 next_dfs_num = 1;
1964 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1965 /* VEC_alloc doesn't actually grow it to the right size, it just
1966 preallocates the space to do so. */
1967 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1968 shared_lookup_phiargs = NULL;
1969 shared_lookup_vops = NULL;
1970 shared_lookup_references = NULL;
1971 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1972 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1973 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1975 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1976 the i'th block in RPO order is bb. We want to map bb's to RPO
1977 numbers, so we need to rearrange this array. */
1978 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1979 rpo_numbers[rpo_numbers_temp[j]] = j;
1981 free (rpo_numbers_temp);
/* VN_TOP is a distinguished tree used as the "unknown/everything"
   lattice value; it only needs a unique identity.  */
1983 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1985 /* Create the VN_INFO structures, and initialize value numbers to
1986 TOP. */
1987 for (i = 0; i < num_ssa_names; i++)
1989 tree name = ssa_name (i);
1990 if (name)
1992 VN_INFO_GET (name)->valnum = VN_TOP;
1993 VN_INFO (name)->expr = name;
/* Assign sequential uids to statements; compare_ops relies on them to
   order statements within a block.  */
1997 FOR_ALL_BB (bb)
1999 block_stmt_iterator bsi;
2000 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2002 tree stmt = bsi_stmt (bsi);
2003 stmt_ann (stmt)->uid = id++;
2007 /* Create the valid and optimistic value numbering tables. */
2008 valid_info = XCNEW (struct vn_tables_s);
2009 allocate_vn_table (valid_info);
2010 optimistic_info = XCNEW (struct vn_tables_s);
2011 allocate_vn_table (optimistic_info);
2012 pre_info = NULL;
2015 void
2016 switch_to_PRE_table (void)
2018 pre_info = XCNEW (struct vn_tables_s);
2019 allocate_vn_table (pre_info);
2020 current_info = pre_info;
/* Free all memory allocated by SCC value numbering: the shared lookup
   vectors, RPO numbers, per-name VN_INFO structures, and the value
   numbering tables.  */
2023 void
2024 free_scc_vn (void)
2026 size_t i;
2028 VEC_free (tree, heap, shared_lookup_phiargs);
2029 VEC_free (tree, gc, shared_lookup_vops);
2030 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2031 XDELETEVEC (rpo_numbers);
2032 for (i = 0; i < num_ssa_names; i++)
2034 tree name = ssa_name (i);
2035 if (name)
2037 XDELETE (VN_INFO (name));
/* Clear stale VALUE_HANDLEs left on SSA names so later passes do not
   see dangling value handles.  */
2038 if (SSA_NAME_VALUE (name) &&
2039 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2040 SSA_NAME_VALUE (name) = NULL;
2044 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2045 VEC_free (tree, heap, sccstack);
2046 free_vn_table (valid_info);
2047 XDELETE (valid_info);
2048 free_vn_table (optimistic_info);
2049 XDELETE (optimistic_info);
/* pre_info exists only if switch_to_PRE_table was called.  */
2050 if (pre_info)
2052 free_vn_table (pre_info);
2053 XDELETE (pre_info);
2057 void
2058 run_scc_vn (void)
2060 size_t i;
2061 tree param;
2063 init_scc_vn ();
2064 current_info = valid_info;
2066 for (param = DECL_ARGUMENTS (current_function_decl);
2067 param;
2068 param = TREE_CHAIN (param))
2070 if (gimple_default_def (cfun, param) != NULL)
2072 tree def = gimple_default_def (cfun, param);
2073 SSA_VAL (def) = def;
2077 for (i = num_ssa_names - 1; i > 0; i--)
2079 tree name = ssa_name (i);
2080 if (name
2081 && VN_INFO (name)->visited == false
2082 && !has_zero_uses (name))
2083 DFS (name);
2086 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "Value numbers:\n");
2089 for (i = 0; i < num_ssa_names; i++)
2091 tree name = ssa_name (i);
2092 if (name && VN_INFO (name)->visited
2093 && (SSA_VAL (name) != name
2094 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2096 print_generic_expr (dump_file, name, 0);
2097 fprintf (dump_file, " = ");
2098 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2099 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2100 else
2101 print_generic_expr (dump_file, SSA_VAL (name), 0);
2102 fprintf (dump_file, "\n");