Daily bump.
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blobb613b2ba21fbecaa765e5548208568404801b49b
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t nary;
111 htab_t phis;
112 htab_t references;
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
116 } *vn_tables_t;
118 /* Nary operations in the hashtable consist of length operands, an
119 opcode, and a type. Result is the value number of the operation,
120 and hashcode is stored to avoid having to calculate it
121 repeatedly. */
123 typedef struct vn_nary_op_s
125 ENUM_BITFIELD(tree_code) opcode : 16;
126 unsigned length : 16;
127 hashval_t hashcode;
128 tree result;
129 tree type;
130 tree op[4];
131 } *vn_nary_op_t;
132 typedef const struct vn_nary_op_s *const_vn_nary_op_t;
134 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
135 arguments, and the basic block the phi is in. Result is the value
136 number of the operation, and hashcode is stored to avoid having to
137 calculate it repeatedly. Phi nodes not in the same block are never
138 considered equivalent. */
140 typedef struct vn_phi_s
142 VEC (tree, heap) *phiargs;
143 basic_block block;
144 hashval_t hashcode;
145 tree result;
146 } *vn_phi_t;
147 typedef const struct vn_phi_s *const_vn_phi_t;
149 /* Reference operands only exist in reference operations structures.
150 They consist of an opcode, type, and some number of operands. For
151 a given opcode, some, all, or none of the operands may be used.
152 The operands are there to store the information that makes up the
153 portion of the addressing calculation that opcode performs. */
155 typedef struct vn_reference_op_struct
157 enum tree_code opcode;
158 tree type;
159 tree op0;
160 tree op1;
161 } vn_reference_op_s;
162 typedef vn_reference_op_s *vn_reference_op_t;
163 typedef const vn_reference_op_s *const_vn_reference_op_t;
165 DEF_VEC_O(vn_reference_op_s);
166 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
168 /* A reference operation in the hashtable is representation as a
169 collection of vuses, representing the memory state at the time of
170 the operation, and a collection of operands that make up the
171 addressing calculation. If two vn_reference_t's have the same set
172 of operands, they access the same memory location. We also store
173 the resulting value number, and the hashcode. The vuses are
174 always stored in order sorted by ssa name version. */
176 typedef struct vn_reference_s
178 VEC (tree, gc) *vuses;
179 VEC (vn_reference_op_s, heap) *operands;
180 hashval_t hashcode;
181 tree result;
182 } *vn_reference_t;
183 typedef const struct vn_reference_s *const_vn_reference_t;
185 /* Valid hashtables storing information we have proven to be
186 correct. */
188 static vn_tables_t valid_info;
190 /* Optimistic hashtables storing information we are making assumptions about
191 during iterations. */
193 static vn_tables_t optimistic_info;
195 /* PRE hashtables storing information about mapping from expressions to
196 value handles. */
198 static vn_tables_t pre_info;
200 /* Pointer to the set of hashtables that is currently being used.
201 Should always point to either the optimistic_info, or the
202 valid_info. */
204 static vn_tables_t current_info;
207 /* Reverse post order index for each basic block. */
209 static int *rpo_numbers;
211 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
213 /* This represents the top of the VN lattice, which is the universal
214 value. */
216 tree VN_TOP;
218 /* Next DFS number and the stack for strongly connected component
219 detection. */
221 static unsigned int next_dfs_num;
222 static VEC (tree, heap) *sccstack;
224 static bool may_insert;
227 DEF_VEC_P(vn_ssa_aux_t);
228 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
230 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
231 are allocated on an obstack for locality reasons, and to free them
232 without looping over the VEC. */
234 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
235 static struct obstack vn_ssa_aux_obstack;
237 /* Return the value numbering information for a given SSA name. */
239 vn_ssa_aux_t
240 VN_INFO (tree name)
242 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
243 SSA_NAME_VERSION (name));
246 /* Set the value numbering info for a given SSA name to a given
247 value. */
249 static inline void
250 VN_INFO_SET (tree name, vn_ssa_aux_t value)
252 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name), value);
256 /* Initialize the value numbering info for a given SSA name.
257 This should be called just once for every SSA name. */
259 vn_ssa_aux_t
260 VN_INFO_GET (tree name)
262 vn_ssa_aux_t newinfo;
264 newinfo = obstack_alloc (&vn_ssa_aux_obstack, sizeof (struct vn_ssa_aux));
265 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
266 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
267 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
268 SSA_NAME_VERSION (name) + 1);
269 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
270 SSA_NAME_VERSION (name), newinfo);
271 return newinfo;
275 /* Free a phi operation structure VP. */
277 static void
278 free_phi (void *vp)
280 vn_phi_t phi = vp;
281 VEC_free (tree, heap, phi->phiargs);
284 /* Free a reference operation structure VP. */
286 static void
287 free_reference (void *vp)
289 vn_reference_t vr = vp;
290 VEC_free (vn_reference_op_s, heap, vr->operands);
293 /* Compare two reference operands P1 and P2 for equality. return true if
294 they are equal, and false otherwise. */
296 static int
297 vn_reference_op_eq (const void *p1, const void *p2)
299 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
300 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
301 return vro1->opcode == vro2->opcode
302 && vro1->type == vro2->type
303 && expressions_equal_p (vro1->op0, vro2->op0)
304 && expressions_equal_p (vro1->op1, vro2->op1);
307 /* Compute the hash for a reference operand VRO1 */
309 static hashval_t
310 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
312 return iterative_hash_expr (vro1->op0, vro1->opcode)
313 + iterative_hash_expr (vro1->op1, vro1->opcode);
316 /* Return the hashcode for a given reference operation P1. */
318 static hashval_t
319 vn_reference_hash (const void *p1)
321 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
322 return vr1->hashcode;
325 /* Compute a hash for the reference operation VR1 and return it. */
327 static inline hashval_t
328 vn_reference_compute_hash (const vn_reference_t vr1)
330 hashval_t result = 0;
331 tree v;
332 int i;
333 vn_reference_op_t vro;
335 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
336 result += iterative_hash_expr (v, 0);
337 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
338 result += vn_reference_op_compute_hash (vro);
340 return result;
343 /* Return true if reference operations P1 and P2 are equivalent. This
344 means they have the same set of operands and vuses. */
346 static int
347 vn_reference_eq (const void *p1, const void *p2)
349 tree v;
350 int i;
351 vn_reference_op_t vro;
353 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
354 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
356 if (vr1->vuses == vr2->vuses
357 && vr1->operands == vr2->operands)
358 return true;
360 /* Impossible for them to be equivalent if they have different
361 number of vuses. */
362 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
363 return false;
365 /* We require that address operands be canonicalized in a way that
366 two memory references will have the same operands if they are
367 equivalent. */
368 if (VEC_length (vn_reference_op_s, vr1->operands)
369 != VEC_length (vn_reference_op_s, vr2->operands))
370 return false;
372 /* The memory state is more often different than the address of the
373 store/load, so check it first. */
374 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
376 if (VEC_index (tree, vr2->vuses, i) != v)
377 return false;
380 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
382 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
383 vro))
384 return false;
386 return true;
389 /* Place the vuses from STMT into *result */
391 static inline void
392 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
394 ssa_op_iter iter;
395 tree vuse;
397 if (!stmt)
398 return;
400 VEC_reserve_exact (tree, gc, *result,
401 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
403 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
404 VEC_quick_push (tree, *result, vuse);
408 /* Copy the VUSE names in STMT into a vector, and return
409 the vector. */
411 VEC (tree, gc) *
412 copy_vuses_from_stmt (tree stmt)
414 VEC (tree, gc) *vuses = NULL;
416 vuses_to_vec (stmt, &vuses);
418 return vuses;
421 /* Place the vdefs from STMT into *result */
423 static inline void
424 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
426 ssa_op_iter iter;
427 tree vdef;
429 if (!stmt)
430 return;
432 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
434 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
435 VEC_quick_push (tree, *result, vdef);
438 /* Copy the names of vdef results in STMT into a vector, and return
439 the vector. */
441 static VEC (tree, gc) *
442 copy_vdefs_from_stmt (tree stmt)
444 VEC (tree, gc) *vdefs = NULL;
446 vdefs_to_vec (stmt, &vdefs);
448 return vdefs;
451 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
452 static VEC (tree, gc) *shared_lookup_vops;
454 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
455 This function will overwrite the current SHARED_LOOKUP_VOPS
456 variable. */
458 VEC (tree, gc) *
459 shared_vuses_from_stmt (tree stmt)
461 VEC_truncate (tree, shared_lookup_vops, 0);
462 vuses_to_vec (stmt, &shared_lookup_vops);
464 return shared_lookup_vops;
467 /* Copy the operations present in load/store/call REF into RESULT, a vector of
468 vn_reference_op_s's. */
470 static void
471 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
473 /* Calls are different from all other reference operations. */
474 if (TREE_CODE (ref) == CALL_EXPR)
476 vn_reference_op_s temp;
477 tree callfn;
478 call_expr_arg_iterator iter;
479 tree callarg;
481 /* Copy the call_expr opcode, type, function being called, and
482 arguments. */
483 memset (&temp, 0, sizeof (temp));
484 temp.type = TREE_TYPE (ref);
485 temp.opcode = CALL_EXPR;
486 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
488 callfn = get_callee_fndecl (ref);
489 if (!callfn)
490 callfn = CALL_EXPR_FN (ref);
491 temp.type = TREE_TYPE (callfn);
492 temp.opcode = TREE_CODE (callfn);
493 temp.op0 = callfn;
494 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
496 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
498 memset (&temp, 0, sizeof (temp));
499 temp.type = TREE_TYPE (callarg);
500 temp.opcode = TREE_CODE (callarg);
501 temp.op0 = callarg;
502 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
504 return;
507 /* For non-calls, store the information that makes up the address. */
509 while (ref)
511 vn_reference_op_s temp;
513 memset (&temp, 0, sizeof (temp));
514 temp.type = TREE_TYPE (ref);
515 temp.opcode = TREE_CODE (ref);
517 switch (temp.opcode)
519 case ALIGN_INDIRECT_REF:
520 case MISALIGNED_INDIRECT_REF:
521 case INDIRECT_REF:
522 /* The only operand is the address, which gets its own
523 vn_reference_op_s structure. */
524 break;
525 case BIT_FIELD_REF:
526 /* Record bits and position. */
527 temp.op0 = TREE_OPERAND (ref, 1);
528 temp.op1 = TREE_OPERAND (ref, 2);
529 break;
530 case COMPONENT_REF:
531 /* If this is a reference to a union member, record the union
532 member size as operand. Do so only if we are doing
533 expression insertion (during FRE), as PRE currently gets
534 confused with this. */
535 if (may_insert
536 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
537 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
538 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
540 temp.type = NULL_TREE;
541 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
543 else
544 /* Record field as operand. */
545 temp.op0 = TREE_OPERAND (ref, 1);
546 break;
547 case ARRAY_RANGE_REF:
548 case ARRAY_REF:
549 /* Record index as operand. */
550 temp.op0 = TREE_OPERAND (ref, 1);
551 temp.op1 = TREE_OPERAND (ref, 3);
552 break;
553 case STRING_CST:
554 case INTEGER_CST:
555 case COMPLEX_CST:
556 case VECTOR_CST:
557 case REAL_CST:
558 case CONSTRUCTOR:
559 case VALUE_HANDLE:
560 case VAR_DECL:
561 case PARM_DECL:
562 case CONST_DECL:
563 case RESULT_DECL:
564 case SSA_NAME:
565 temp.op0 = ref;
566 break;
567 /* These are only interesting for their operands, their
568 existence, and their type. They will never be the last
569 ref in the chain of references (IE they require an
570 operand), so we don't have to put anything
571 for op* as it will be handled by the iteration */
572 case IMAGPART_EXPR:
573 case REALPART_EXPR:
574 case VIEW_CONVERT_EXPR:
575 case ADDR_EXPR:
576 break;
577 default:
578 gcc_unreachable ();
581 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
583 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
584 ref = TREE_OPERAND (ref, 0);
585 else
586 ref = NULL_TREE;
590 /* Create a vector of vn_reference_op_s structures from REF, a
591 REFERENCE_CLASS_P tree. The vector is not shared. */
593 static VEC(vn_reference_op_s, heap) *
594 create_reference_ops_from_ref (tree ref)
596 VEC (vn_reference_op_s, heap) *result = NULL;
598 copy_reference_ops_from_ref (ref, &result);
599 return result;
602 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
604 /* Create a vector of vn_reference_op_s structures from REF, a
605 REFERENCE_CLASS_P tree. The vector is shared among all callers of
606 this function. */
608 static VEC(vn_reference_op_s, heap) *
609 shared_reference_ops_from_ref (tree ref)
611 if (!ref)
612 return NULL;
613 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
614 copy_reference_ops_from_ref (ref, &shared_lookup_references);
615 return shared_lookup_references;
619 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
620 structures into their value numbers. This is done in-place, and
621 the vector passed in is returned. */
623 static VEC (vn_reference_op_s, heap) *
624 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
626 vn_reference_op_t vro;
627 int i;
629 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
631 if (vro->opcode == SSA_NAME
632 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
633 vro->op0 = SSA_VAL (vro->op0);
636 return orig;
639 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
640 their value numbers. This is done in-place, and the vector passed
641 in is returned. */
643 static VEC (tree, gc) *
644 valueize_vuses (VEC (tree, gc) *orig)
646 bool made_replacement = false;
647 tree vuse;
648 int i;
650 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
652 if (vuse != SSA_VAL (vuse))
654 made_replacement = true;
655 VEC_replace (tree, orig, i, SSA_VAL (vuse));
659 if (made_replacement && VEC_length (tree, orig) > 1)
660 sort_vuses (orig);
662 return orig;
665 /* Return the single reference statement defining all virtual uses
666 in VUSES or NULL_TREE, if there are multiple defining statements.
667 Take into account only definitions that alias REF if following
668 back-edges. */
670 static tree
671 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
673 tree def_stmt, vuse;
674 unsigned int i;
676 gcc_assert (VEC_length (tree, vuses) >= 1);
678 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
679 if (TREE_CODE (def_stmt) == PHI_NODE)
681 /* We can only handle lookups over PHI nodes for a single
682 virtual operand. */
683 if (VEC_length (tree, vuses) == 1)
685 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
686 goto cont;
688 else
689 return NULL_TREE;
692 /* Verify each VUSE reaches the same defining stmt. */
693 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
695 tree tmp = SSA_NAME_DEF_STMT (vuse);
696 if (tmp != def_stmt)
697 return NULL_TREE;
700 /* Now see if the definition aliases ref, and loop until it does. */
701 cont:
702 while (def_stmt
703 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
704 && !get_call_expr_in (def_stmt)
705 && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
706 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
708 return def_stmt;
711 /* Lookup a SCCVN reference operation VR in the current hash table.
712 Returns the resulting value number if it exists in the hash table,
713 NULL_TREE otherwise. */
715 static tree
716 vn_reference_lookup_1 (vn_reference_t vr)
718 void **slot;
719 hashval_t hash;
721 hash = vr->hashcode;
722 slot = htab_find_slot_with_hash (current_info->references, vr,
723 hash, NO_INSERT);
724 if (!slot && current_info == optimistic_info)
725 slot = htab_find_slot_with_hash (valid_info->references, vr,
726 hash, NO_INSERT);
727 if (slot)
728 return ((vn_reference_t)*slot)->result;
730 return NULL_TREE;
733 /* Lookup OP in the current hash table, and return the resulting
734 value number if it exists in the hash table. Return NULL_TREE if
735 it does not exist in the hash table. */
737 tree
738 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
740 struct vn_reference_s vr1;
741 tree result, def_stmt;
743 vr1.vuses = valueize_vuses (vuses);
744 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
745 vr1.hashcode = vn_reference_compute_hash (&vr1);
746 result = vn_reference_lookup_1 (&vr1);
748 /* If there is a single defining statement for all virtual uses, we can
749 use that, following virtual use-def chains. */
750 if (!result
751 && vr1.vuses
752 && VEC_length (tree, vr1.vuses) >= 1
753 && !get_call_expr_in (op)
754 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
755 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
756 /* If there is a call involved, op must be assumed to
757 be clobbered. */
758 && !get_call_expr_in (def_stmt))
760 /* We are now at an aliasing definition for the vuses we want to
761 look up. Re-do the lookup with the vdefs for this stmt. */
762 vdefs_to_vec (def_stmt, &vuses);
763 vr1.vuses = valueize_vuses (vuses);
764 vr1.hashcode = vn_reference_compute_hash (&vr1);
765 result = vn_reference_lookup_1 (&vr1);
768 return result;
771 /* Insert OP into the current hash table with a value number of
772 RESULT. */
774 void
775 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
777 void **slot;
778 vn_reference_t vr1;
780 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
782 vr1->vuses = valueize_vuses (vuses);
783 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
784 vr1->hashcode = vn_reference_compute_hash (vr1);
785 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
787 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
788 INSERT);
790 /* Because we lookup stores using vuses, and value number failures
791 using the vdefs (see visit_reference_op_store for how and why),
792 it's possible that on failure we may try to insert an already
793 inserted store. This is not wrong, there is no ssa name for a
794 store that we could use as a differentiator anyway. Thus, unlike
795 the other lookup functions, you cannot gcc_assert (!*slot)
796 here. */
798 /* But free the old slot in case of a collision. */
799 if (*slot)
800 free_reference (*slot);
802 *slot = vr1;
805 /* Compute and return the hash value for nary operation VBO1. */
807 static inline hashval_t
808 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
810 hashval_t hash = 0;
811 unsigned i;
813 for (i = 0; i < vno1->length; ++i)
814 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
815 vno1->op[i] = SSA_VAL (vno1->op[i]);
817 if (vno1->length == 2
818 && commutative_tree_code (vno1->opcode)
819 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
821 tree temp = vno1->op[0];
822 vno1->op[0] = vno1->op[1];
823 vno1->op[1] = temp;
826 for (i = 0; i < vno1->length; ++i)
827 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
829 return hash;
832 /* Return the computed hashcode for nary operation P1. */
834 static hashval_t
835 vn_nary_op_hash (const void *p1)
837 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
838 return vno1->hashcode;
841 /* Compare nary operations P1 and P2 and return true if they are
842 equivalent. */
844 static int
845 vn_nary_op_eq (const void *p1, const void *p2)
847 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
848 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
849 unsigned i;
851 if (vno1->opcode != vno2->opcode
852 || vno1->type != vno2->type)
853 return false;
855 for (i = 0; i < vno1->length; ++i)
856 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
857 return false;
859 return true;
862 /* Lookup OP in the current hash table, and return the resulting
863 value number if it exists in the hash table. Return NULL_TREE if
864 it does not exist in the hash table. */
866 tree
867 vn_nary_op_lookup (tree op)
869 void **slot;
870 struct vn_nary_op_s vno1;
871 unsigned i;
873 vno1.opcode = TREE_CODE (op);
874 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
875 vno1.type = TREE_TYPE (op);
876 for (i = 0; i < vno1.length; ++i)
877 vno1.op[i] = TREE_OPERAND (op, i);
878 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
879 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
880 NO_INSERT);
881 if (!slot && current_info == optimistic_info)
882 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
883 NO_INSERT);
884 if (!slot)
885 return NULL_TREE;
886 return ((vn_nary_op_t)*slot)->result;
889 /* Insert OP into the current hash table with a value number of
890 RESULT. */
892 void
893 vn_nary_op_insert (tree op, tree result)
895 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
896 void **slot;
897 vn_nary_op_t vno1;
898 unsigned i;
900 vno1 = obstack_alloc (&current_info->nary_obstack,
901 (sizeof (struct vn_nary_op_s)
902 - sizeof (tree) * (4 - length)));
903 vno1->opcode = TREE_CODE (op);
904 vno1->length = length;
905 vno1->type = TREE_TYPE (op);
906 for (i = 0; i < vno1->length; ++i)
907 vno1->op[i] = TREE_OPERAND (op, i);
908 vno1->result = result;
909 vno1->hashcode = vn_nary_op_compute_hash (vno1);
910 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
911 INSERT);
912 gcc_assert (!*slot);
914 *slot = vno1;
917 /* Compute a hashcode for PHI operation VP1 and return it. */
919 static inline hashval_t
920 vn_phi_compute_hash (vn_phi_t vp1)
922 hashval_t result = 0;
923 int i;
924 tree phi1op;
926 result = vp1->block->index;
928 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
930 if (phi1op == VN_TOP)
931 continue;
932 result += iterative_hash_expr (phi1op, result);
935 return result;
938 /* Return the computed hashcode for phi operation P1. */
940 static hashval_t
941 vn_phi_hash (const void *p1)
943 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
944 return vp1->hashcode;
947 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
949 static int
950 vn_phi_eq (const void *p1, const void *p2)
952 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
953 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
955 if (vp1->block == vp2->block)
957 int i;
958 tree phi1op;
960 /* Any phi in the same block will have it's arguments in the
961 same edge order, because of how we store phi nodes. */
962 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
964 tree phi2op = VEC_index (tree, vp2->phiargs, i);
965 if (phi1op == VN_TOP || phi2op == VN_TOP)
966 continue;
967 if (!expressions_equal_p (phi1op, phi2op))
968 return false;
970 return true;
972 return false;
975 static VEC(tree, heap) *shared_lookup_phiargs;
977 /* Lookup PHI in the current hash table, and return the resulting
978 value number if it exists in the hash table. Return NULL_TREE if
979 it does not exist in the hash table. */
981 static tree
982 vn_phi_lookup (tree phi)
984 void **slot;
985 struct vn_phi_s vp1;
986 int i;
988 VEC_truncate (tree, shared_lookup_phiargs, 0);
990 /* Canonicalize the SSA_NAME's to their value number. */
991 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
993 tree def = PHI_ARG_DEF (phi, i);
994 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
995 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
997 vp1.phiargs = shared_lookup_phiargs;
998 vp1.block = bb_for_stmt (phi);
999 vp1.hashcode = vn_phi_compute_hash (&vp1);
1000 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1001 NO_INSERT);
1002 if (!slot && current_info == optimistic_info)
1003 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1004 NO_INSERT);
1005 if (!slot)
1006 return NULL_TREE;
1007 return ((vn_phi_t)*slot)->result;
1010 /* Insert PHI into the current hash table with a value number of
1011 RESULT. */
1013 static void
1014 vn_phi_insert (tree phi, tree result)
1016 void **slot;
1017 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1018 int i;
1019 VEC (tree, heap) *args = NULL;
1021 /* Canonicalize the SSA_NAME's to their value number. */
1022 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1024 tree def = PHI_ARG_DEF (phi, i);
1025 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1026 VEC_safe_push (tree, heap, args, def);
1028 vp1->phiargs = args;
1029 vp1->block = bb_for_stmt (phi);
1030 vp1->result = result;
1031 vp1->hashcode = vn_phi_compute_hash (vp1);
1033 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1034 INSERT);
1036 /* Because we iterate over phi operations more than once, it's
1037 possible the slot might already exist here, hence no assert.*/
1038 *slot = vp1;
1042 /* Print set of components in strongly connected component SCC to OUT. */
1044 static void
1045 print_scc (FILE *out, VEC (tree, heap) *scc)
1047 tree var;
1048 unsigned int i;
1050 fprintf (out, "SCC consists of: ");
1051 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1053 print_generic_expr (out, var, 0);
1054 fprintf (out, " ");
1056 fprintf (out, "\n");
1059 /* Set the value number of FROM to TO, return true if it has changed
1060 as a result. */
1062 static inline bool
1063 set_ssa_val_to (tree from, tree to)
1065 tree currval;
1067 if (from != to
1068 && TREE_CODE (to) == SSA_NAME
1069 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1070 to = from;
1072 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1073 and invariants. So assert that here. */
1074 gcc_assert (to != NULL_TREE
1075 && (to == VN_TOP
1076 || TREE_CODE (to) == SSA_NAME
1077 || is_gimple_min_invariant (to)));
1079 if (dump_file && (dump_flags & TDF_DETAILS))
1081 fprintf (dump_file, "Setting value number of ");
1082 print_generic_expr (dump_file, from, 0);
1083 fprintf (dump_file, " to ");
1084 print_generic_expr (dump_file, to, 0);
1085 fprintf (dump_file, "\n");
1088 currval = SSA_VAL (from);
1090 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1092 SSA_VAL (from) = to;
1093 return true;
1095 return false;
1098 /* Set all definitions in STMT to value number to themselves.
1099 Return true if a value number changed. */
1101 static bool
1102 defs_to_varying (tree stmt)
1104 bool changed = false;
1105 ssa_op_iter iter;
1106 def_operand_p defp;
1108 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1110 tree def = DEF_FROM_PTR (defp);
1112 VN_INFO (def)->use_processed = true;
1113 changed |= set_ssa_val_to (def, def);
1115 return changed;
1118 static tree
1119 try_to_simplify (tree stmt, tree rhs);
1121 /* Visit a copy between LHS and RHS, return true if the value number
1122 changed. */
1124 static bool
1125 visit_copy (tree lhs, tree rhs)
1128 /* Follow chains of copies to their destination. */
1129 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1130 rhs = SSA_VAL (rhs);
1132 /* The copy may have a more interesting constant filled expression
1133 (we don't, since we know our RHS is just an SSA name). */
1134 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1135 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1137 return set_ssa_val_to (lhs, rhs);
1140 /* Visit a unary operator RHS, value number it, and return true if the
1141 value number of LHS has changed as a result. */
1143 static bool
1144 visit_unary_op (tree lhs, tree op)
1146 bool changed = false;
1147 tree result = vn_nary_op_lookup (op);
1149 if (result)
1151 changed = set_ssa_val_to (lhs, result);
1153 else
1155 changed = set_ssa_val_to (lhs, lhs);
1156 vn_nary_op_insert (op, lhs);
1159 return changed;
1162 /* Visit a binary operator RHS, value number it, and return true if the
1163 value number of LHS has changed as a result. */
1165 static bool
1166 visit_binary_op (tree lhs, tree op)
1168 bool changed = false;
1169 tree result = vn_nary_op_lookup (op);
1171 if (result)
1173 changed = set_ssa_val_to (lhs, result);
1175 else
1177 changed = set_ssa_val_to (lhs, lhs);
1178 vn_nary_op_insert (op, lhs);
1181 return changed;
1184 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1185 and return true if the value number of the LHS has changed as a result. */
1187 static bool
1188 visit_reference_op_load (tree lhs, tree op, tree stmt)
1190 bool changed = false;
/* Look up OP under the memory state (vuses) of STMT.  */
1191 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1193 /* We handle type-punning through unions by value-numbering based
1194 on offset and size of the access. Be prepared to handle a
1195 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1196 if (result
1197 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1199 /* We will be setting the value number of lhs to the value number
1200 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1201 So first simplify and lookup this expression to see if it
1202 is already available. */
1203 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1204 if (stmt
1205 && !is_gimple_min_invariant (val)
1206 && TREE_CODE (val) != SSA_NAME)
1208 tree tem = try_to_simplify (stmt, val);
1209 if (tem)
1210 val = tem;
1212 result = val;
1213 if (!is_gimple_min_invariant (val)
1214 && TREE_CODE (val) != SSA_NAME)
1215 result = vn_nary_op_lookup (val);
1216 /* If the expression is not yet available, value-number lhs to
1217 a new SSA_NAME we create. */
/* Insertion only happens when the caller enabled it (may_insert),
   i.e. when running on behalf of PRE.  */
1218 if (!result && may_insert)
1220 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
1221 /* Initialize value-number information properly. */
1222 VN_INFO_GET (result)->valnum = result;
1223 VN_INFO (result)->expr = val;
1224 VN_INFO (result)->needs_insertion = true;
1225 /* As all "inserted" statements are singleton SCCs, insert
1226 to the valid table. This is strictly needed to
1227 avoid re-generating new value SSA_NAMEs for the same
1228 expression during SCC iteration over and over (the
1229 optimistic table gets cleared after each iteration).
1230 We do not need to insert into the optimistic table, as
1231 lookups there will fall back to the valid table. */
1232 if (current_info == optimistic_info)
1234 current_info = valid_info;
1235 vn_nary_op_insert (val, result);
/* Restore the table we temporarily switched away from.  */
1236 current_info = optimistic_info;
1238 else
1239 vn_nary_op_insert (val, result);
1240 if (dump_file && (dump_flags & TDF_DETAILS))
1242 fprintf (dump_file, "Inserting name ");
1243 print_generic_expr (dump_file, result, 0);
1244 fprintf (dump_file, " for expression ");
1245 print_generic_expr (dump_file, val, 0);
1246 fprintf (dump_file, "\n");
1251 if (result)
1253 changed = set_ssa_val_to (lhs, result);
/* Propagate constant-expression info from the value name to LHS.  */
1254 if (TREE_CODE (result) == SSA_NAME
1255 && VN_INFO (result)->has_constants)
1257 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1258 VN_INFO (lhs)->has_constants = true;
1261 else
/* No prior value: LHS is its own value and becomes the canonical
   name for this reference under STMT's vuses.  */
1263 changed = set_ssa_val_to (lhs, lhs);
1264 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1267 return changed;
1271 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1272 and return true if the value number of the LHS has changed as a result. */
1274 static bool
1275 visit_reference_op_store (tree lhs, tree op, tree stmt)
1277 bool changed = false;
1278 tree result;
1279 bool resultsame = false;
1281 /* First we want to lookup using the *vuses* from the store and see
1282 if there the last store to this location with the same address
1283 had the same value.
1285 The vuses represent the memory state before the store. If the
1286 memory state, address, and value of the store is the same as the
1287 last store to this location, then this store will produce the
1288 same memory state as that store.
1290 In this case the vdef versions for this store are value numbered to those
1291 vuse versions, since they represent the same memory state after
1292 this store.
1294 Otherwise, the vdefs for the store are used when inserting into
1295 the table, since the store generates a new memory state. */
1297 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt))
1299 if (result)
/* Compare value numbers, not the raw operands.  */
1301 if (TREE_CODE (result) == SSA_NAME
1302 result = SSA_VAL (result);
1303 if (TREE_CODE (op) == SSA_NAME)
1304 op = SSA_VAL (op);
1305 resultsame = expressions_equal_p (result, op);
1308 if (!result || !resultsame)
1310 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1311 int i;
1312 tree vdef;
1314 if (dump_file && (dump_flags & TDF_DETAILS))
1316 fprintf (dump_file, "No store match\n");
1317 fprintf (dump_file, "Value numbering store ");
1318 print_generic_expr (dump_file, lhs, 0);
1319 fprintf (dump_file, " to ");
1320 print_generic_expr (dump_file, op, 0);
1321 fprintf (dump_file, "\n");
1323 /* Have to set value numbers before insert, since insert is
1324 going to valueize the references in-place. */
1325 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1327 VN_INFO (vdef)->use_processed = true;
1328 changed |= set_ssa_val_to (vdef, vdef);
1331 /* Do not insert structure copies into the tables. */
1332 if (is_gimple_min_invariant (op)
1333 || is_gimple_reg (op))
1334 vn_reference_insert (lhs, op, vdefs);
1336 else
1338 /* We had a match, so value number the vdefs to have the value
1339 number of the vuses they came from. */
1340 ssa_op_iter op_iter;
1341 def_operand_p var;
1342 vuse_vec_p vv;
1344 if (dump_file && (dump_flags & TDF_DETAILS))
1345 fprintf (dump_file, "Store matched earlier value,"
1346 "value numbering store vdefs to matching vuses.\n");
1348 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1350 tree def = DEF_FROM_PTR (var);
1351 tree use;
1353 /* Uh, if the vuse is a multiuse, we can't really do much
1354 here, sadly, since we don't know which value number of
1355 which vuse to use. */
1356 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1357 use = def;
1358 else
1359 use = VUSE_ELEMENT_VAR (*vv, 0);
1361 VN_INFO (def)->use_processed = true;
/* The post-store memory state equals the pre-store state, so the
   vdef gets the vuse's value number.  */
1362 changed |= set_ssa_val_to (def, SSA_VAL (use));
1366 return changed;
1369 /* Visit and value number PHI, return true if the value number
1370 changed. */
1372 static bool
1373 visit_phi (tree phi)
1375 bool changed = false;
1376 tree result;
1377 tree sameval = VN_TOP;
1378 bool allsame = true;
1379 int i;
1381 /* TODO: We could check for this in init_sccvn, and replace this
1382 with a gcc_assert. */
/* Abnormal PHI results must stay as their own value.  */
1383 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1384 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1386 /* See if all non-TOP arguments have the same value. TOP is
1387 equivalent to everything, so we can ignore it. */
1388 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1390 tree def = PHI_ARG_DEF (phi, i);
/* Compare value numbers of the arguments, not the names.  */
1392 if (TREE_CODE (def) == SSA_NAME)
1393 def = SSA_VAL (def);
1394 if (def == VN_TOP)
1395 continue;
1396 if (sameval == VN_TOP)
1398 sameval = def;
1400 else
1402 if (!expressions_equal_p (def, sameval))
1404 allsame = false;
1405 break;
1410 /* If all value numbered to the same value, the phi node has that
1411 value. */
1412 if (allsame)
1414 if (is_gimple_min_invariant (sameval))
1416 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1417 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1419 else
1421 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1422 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* A same-valued SSA name is handled as a copy so copy chains get
   followed; anything else is set directly.  */
1425 if (TREE_CODE (sameval) == SSA_NAME)
1426 return visit_copy (PHI_RESULT (phi), sameval);
1428 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1431 /* Otherwise, see if it is equivalent to a phi node in this block. */
1432 result = vn_phi_lookup (phi);
1433 if (result)
1435 if (TREE_CODE (result) == SSA_NAME)
1436 changed = visit_copy (PHI_RESULT (phi), result);
1437 else
1438 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1440 else
/* No equivalent PHI: the result values to itself and becomes the
   canonical PHI for this block/argument combination.  */
1442 vn_phi_insert (phi, PHI_RESULT (phi));
1443 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1444 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1445 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1448 return changed;
1451 /* Return true if EXPR contains constants. */
1453 static bool
1454 expr_has_constants (tree expr)
1456 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1458 case tcc_unary:
1459 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1461 case tcc_binary:
1462 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1463 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1464 /* Constants inside reference ops are rarely interesting, but
1465 it can take a lot of looking to find them. */
1466 case tcc_reference:
1467 case tcc_declaration:
1468 return false;
1469 default:
1470 return is_gimple_min_invariant (expr);
1472 return false;
1475 /* Replace SSA_NAMES in expr with their value numbers, and return the
1476 result.
1477 This is performed in place. */
1479 static tree
1480 valueize_expr (tree expr)
1482 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1484 case tcc_unary:
1485 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1486 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1487 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1488 break;
1489 case tcc_binary:
1490 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1491 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1492 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1493 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1494 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1495 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1496 break;
1497 default:
1498 break;
1500 return expr;
1503 /* Simplify the binary expression RHS, and return the result if
1504 simplified. */
1506 static tree
1507 simplify_binary_expression (tree stmt, tree rhs)
1509 tree result = NULL_TREE;
1510 tree op0 = TREE_OPERAND (rhs, 0);
1511 tree op1 = TREE_OPERAND (rhs, 1);
1513 /* This will not catch every single case we could combine, but will
1514 catch those with constants. The goal here is to simultaneously
1515 combine constants between expressions, but avoid infinite
1516 expansion of expressions during simplification. */
/* Substitute each SSA operand by its (valueized) defining expression
   when that carries constants, else by its value number.  */
1517 if (TREE_CODE (op0) == SSA_NAME)
1519 if (VN_INFO (op0)->has_constants)
1520 op0 = valueize_expr (VN_INFO (op0)->expr);
1521 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1522 op0 = SSA_VAL (op0);
1525 if (TREE_CODE (op1) == SSA_NAME)
1527 if (VN_INFO (op1)->has_constants)
1528 op1 = valueize_expr (VN_INFO (op1)->expr);
1529 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1530 op1 = SSA_VAL (op1);
1533 /* Avoid folding if nothing changed. */
1534 if (op0 == TREE_OPERAND (rhs, 0)
1535 && op1 == TREE_OPERAND (rhs, 1))
1536 return NULL_TREE;
/* Folding speculative operands may trigger bogus overflow warnings;
   defer them and only emit if the fold result is actually usable.  */
1538 fold_defer_overflow_warnings ();
1540 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1542 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1543 stmt, 0);
1545 /* Make sure result is not a complex expression consisting
1546 of operators of operators (IE (a + b) + (a + c))
1547 Otherwise, we will end up with unbounded expressions if
1548 fold does anything at all. */
1549 if (result && valid_gimple_expression_p (result))
1550 return result;
1552 return NULL_TREE;
1555 /* Simplify the unary expression RHS, and return the result if
1556 simplified. */
/* Unlike simplify_binary_expression this returns RHS itself (not
   NULL_TREE) when no simplification happened.  */
1558 static tree
1559 simplify_unary_expression (tree rhs)
1561 tree result = NULL_TREE;
1562 tree op0 = TREE_OPERAND (rhs, 0);
1564 if (TREE_CODE (op0) != SSA_NAME)
1565 return NULL_TREE;
1567 if (VN_INFO (op0)->has_constants)
1568 op0 = valueize_expr (VN_INFO (op0)->expr);
/* For conversion-like codes also try combining with the operand's
   defining expression even when it has no constants.  */
1569 else if (TREE_CODE (rhs) == NOP_EXPR
1570 || TREE_CODE (rhs) == CONVERT_EXPR
1571 || TREE_CODE (rhs) == REALPART_EXPR
1572 || TREE_CODE (rhs) == IMAGPART_EXPR
1573 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
1575 /* We want to do tree-combining on conversion-like expressions.
1576 Make sure we feed only SSA_NAMEs or constants to fold though. */
1577 tree tem = valueize_expr (VN_INFO (op0)->expr);
1578 if (UNARY_CLASS_P (tem)
1579 || BINARY_CLASS_P (tem)
1580 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
1581 || TREE_CODE (tem) == SSA_NAME
1582 || is_gimple_min_invariant (tem))
1583 op0 = tem;
1586 /* Avoid folding if nothing changed, but remember the expression. */
1587 if (op0 == TREE_OPERAND (rhs, 0))
1588 return rhs;
1590 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1591 if (result)
1593 STRIP_USELESS_TYPE_CONVERSION (result);
1594 if (valid_gimple_expression_p (result))
1595 return result;
1598 return rhs;
1601 /* Try to simplify RHS using equivalences and constant folding. */
/* Returns the simplified expression, or RHS unchanged when nothing
   applies (except for declarations/references, which may return
   NULL-equivalent paths via the breaks below).  */
1603 static tree
1604 try_to_simplify (tree stmt, tree rhs)
1606 tree tem;
1608 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
1609 in this case, there is no point in doing extra work. */
1610 if (TREE_CODE (rhs) == SSA_NAME)
1611 return rhs;
1613 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1615 case tcc_declaration:
1616 tem = get_symbol_constant_value (rhs);
1617 if (tem)
1618 return tem;
1619 break;
1621 case tcc_reference:
1622 /* Do not do full-blown reference lookup here, but simplify
1623 reads from constant aggregates. */
1624 tem = fold_const_aggregate_ref (rhs);
1625 if (tem)
1626 return tem;
1628 /* Fallthrough for some codes that can operate on registers. */
/* Deliberate fallthrough into tcc_unary for REALPART/IMAGPART/
   VIEW_CONVERT, which are classified tcc_reference but behave like
   unary operations on registers.  */
1629 if (!(TREE_CODE (rhs) == REALPART_EXPR
1630 || TREE_CODE (rhs) == IMAGPART_EXPR
1631 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
1632 break;
1633 /* We could do a little more with unary ops, if they expand
1634 into binary ops, but it's debatable whether it is worth it. */
1635 case tcc_unary:
1636 return simplify_unary_expression (rhs);
1637 break;
1638 case tcc_comparison:
1639 case tcc_binary:
1640 return simplify_binary_expression (stmt, rhs);
1641 break;
1642 default:
1643 break;
1646 return rhs;
1649 /* Visit and value number USE, return true if the value number
1650 changed. */
/* This is the central dispatcher of the algorithm: it classifies the
   defining statement of USE (empty def, PHI, copy, store, load,
   unary/binary/call expression) and forwards to the matching visit_*
   routine, first attempting simplification of the RHS.  */
1652 static bool
1653 visit_use (tree use)
1655 bool changed = false;
1656 tree stmt = SSA_NAME_DEF_STMT (use);
1657 stmt_ann_t ann;
1659 VN_INFO (use)->use_processed = true;
1661 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1662 if (dump_file && (dump_flags & TDF_DETAILS)
1663 && !IS_EMPTY_STMT (stmt))
1665 fprintf (dump_file, "Value numbering ");
1666 print_generic_expr (dump_file, use, 0);
1667 fprintf (dump_file, " stmt = ");
1668 print_generic_stmt (dump_file, stmt, 0);
1671 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1672 if (TREE_CODE (stmt) == RETURN_EXPR
1673 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1674 stmt = TREE_OPERAND (stmt, 0);
1676 ann = stmt_ann (stmt);
1678 /* Handle uninitialized uses. */
1679 if (IS_EMPTY_STMT (stmt))
1681 changed = set_ssa_val_to (use, use);
1683 else
1685 if (TREE_CODE (stmt) == PHI_NODE)
1687 changed = visit_phi (stmt);
/* Statements with volatile operands or that may throw cannot be
   value numbered; make all their defs varying.  */
1689 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1690 || (ann && ann->has_volatile_ops)
1691 || tree_could_throw_p (stmt))
1693 changed = defs_to_varying (stmt);
1695 else
1697 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1698 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1699 tree simplified;
1701 STRIP_USELESS_TYPE_CONVERSION (rhs);
1703 /* Shortcut for copies. Simplifying copies is pointless,
1704 since we copy the expression and value they represent. */
1705 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1707 changed = visit_copy (lhs, rhs);
1708 goto done;
1710 simplified = try_to_simplify (stmt, rhs);
1711 if (simplified && simplified != rhs)
1713 if (dump_file && (dump_flags & TDF_DETAILS))
1715 fprintf (dump_file, "RHS ");
1716 print_generic_expr (dump_file, rhs, 0);
1717 fprintf (dump_file, " simplified to ");
1718 print_generic_expr (dump_file, simplified, 0);
1719 if (TREE_CODE (lhs) == SSA_NAME)
1720 fprintf (dump_file, " has constants %d\n",
1721 expr_has_constants (simplified));
1722 else
1723 fprintf (dump_file, "\n");
1726 /* Setting value numbers to constants will occasionally
1727 screw up phi congruence because constants are not
1728 uniquely associated with a single ssa name that can be
1729 looked up. */
1730 if (simplified && is_gimple_min_invariant (simplified)
1731 && TREE_CODE (lhs) == SSA_NAME
1732 && simplified != rhs)
1734 VN_INFO (lhs)->expr = simplified;
1735 VN_INFO (lhs)->has_constants = true;
1736 changed = set_ssa_val_to (lhs, simplified);
1737 goto done;
1739 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1740 && TREE_CODE (lhs) == SSA_NAME)
1742 changed = visit_copy (lhs, simplified);
1743 goto done;
1745 else if (simplified)
1747 if (TREE_CODE (lhs) == SSA_NAME)
1749 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1750 /* We have to unshare the expression or else
1751 valuizing may change the IL stream. */
1752 VN_INFO (lhs)->expr = unshare_expr (simplified);
1754 rhs = simplified;
1756 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1758 VN_INFO (lhs)->has_constants = true;
1759 VN_INFO (lhs)->expr = unshare_expr (rhs);
1761 else if (TREE_CODE (lhs) == SSA_NAME)
1763 /* We reset expr and constantness here because we may
1764 have been value numbering optimistically, and
1765 iterating. They may become non-constant in this case,
1766 even if they were optimistically constant. */
1768 VN_INFO (lhs)->has_constants = false;
1769 VN_INFO (lhs)->expr = lhs;
1772 if (TREE_CODE (lhs) == SSA_NAME
1773 /* We can substitute SSA_NAMEs that are live over
1774 abnormal edges with their constant value. */
1775 && !is_gimple_min_invariant (rhs)
1776 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1777 changed = defs_to_varying (stmt);
1778 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1780 changed = visit_reference_op_store (lhs, rhs, stmt);
1782 else if (TREE_CODE (lhs) == SSA_NAME)
1784 if (is_gimple_min_invariant (rhs))
1786 VN_INFO (lhs)->has_constants = true;
1787 VN_INFO (lhs)->expr = rhs;
1788 changed = set_ssa_val_to (lhs, rhs);
1790 else
/* Dispatch on the class of the (possibly simplified) RHS.  */
1792 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1794 case tcc_unary:
1795 changed = visit_unary_op (lhs, rhs);
1796 break;
1797 case tcc_binary:
1798 changed = visit_binary_op (lhs, rhs);
1799 break;
1800 /* If tcc_vl_expr ever encompasses more than
1801 CALL_EXPR, this will need to be changed. */
1802 case tcc_vl_exp:
/* Only pure/const calls can be value numbered (as loads);
   everything else clobbers memory.  */
1803 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1804 changed = visit_reference_op_load (lhs, rhs, stmt);
1805 else
1806 changed = defs_to_varying (stmt);
1807 break;
1808 case tcc_declaration:
1809 case tcc_reference:
1810 changed = visit_reference_op_load (lhs, rhs, stmt);
1811 break;
1812 case tcc_expression:
1813 if (TREE_CODE (rhs) == ADDR_EXPR)
1815 changed = visit_unary_op (lhs, rhs);
1816 goto done;
1818 /* Fallthrough. */
1819 default:
1820 changed = defs_to_varying (stmt);
1821 break;
1825 else
1826 changed = defs_to_varying (stmt);
1829 done:
1830 return changed;
1833 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names: orders by the RPO number of the
   defining statement's block; within a block, PHIs come first, then
   statements by their uid.  Empty def statements (default defs)
   order before everything else.  */
1835 static int
1836 compare_ops (const void *pa, const void *pb)
1838 const tree opa = *((const tree *)pa);
1839 const tree opb = *((const tree *)pb);
1840 tree opstmta = SSA_NAME_DEF_STMT (opa);
1841 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1842 basic_block bba;
1843 basic_block bbb;
1845 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1846 return 0;
1847 else if (IS_EMPTY_STMT (opstmta))
1848 return -1;
1849 else if (IS_EMPTY_STMT (opstmtb))
1850 return 1;
1852 bba = bb_for_stmt (opstmta);
1853 bbb = bb_for_stmt (opstmtb);
1855 if (!bba && !bbb)
1856 return 0;
1857 else if (!bba)
1858 return -1;
1859 else if (!bbb)
1860 return 1;
1862 if (bba == bbb)
1864 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1865 return 0;
1866 else if (TREE_CODE (opstmta) == PHI_NODE)
1867 return -1;
1868 else if (TREE_CODE (opstmtb) == PHI_NODE)
1869 return 1;
/* Same block, both real statements: order by statement uid (set up
   in init_scc_vn in statement order).  */
1870 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1872 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1875 /* Sort an array containing members of a strongly connected component
1876 SCC so that the members are ordered by RPO number.
1877 This means that when the sort is complete, iterating through the
1878 array will give you the members in RPO order. */
1880 static void
1881 sort_scc (VEC (tree, heap) *scc)
1883 qsort (VEC_address (tree, scc),
1884 VEC_length (tree, scc),
1885 sizeof (tree),
1886 compare_ops);
1889 /* Process a strongly connected component in the SSA graph. */
/* Singleton SCCs are visited once; larger SCCs are iterated to a
   fixed point with the optimistic table (cleared each round), then
   visited one final time with the valid table.  */
1891 static void
1892 process_scc (VEC (tree, heap) *scc)
1894 /* If the SCC has a single member, just visit it. */
1896 if (VEC_length (tree, scc) == 1)
1898 tree use = VEC_index (tree, scc, 0);
1899 if (!VN_INFO (use)->use_processed)
1900 visit_use (use);
1902 else
1904 tree var;
1905 unsigned int i;
1906 unsigned int iterations = 0;
1907 bool changed = true;
1909 /* Iterate over the SCC with the optimistic table until it stops
1910 changing. */
1911 current_info = optimistic_info;
1912 while (changed)
1914 changed = false;
1915 iterations++;
/* Optimistic assumptions from the previous round may have been
   invalidated, so start each iteration from empty tables.  */
1916 htab_empty (optimistic_info->nary);
1917 htab_empty (optimistic_info->phis);
1918 htab_empty (optimistic_info->references);
1919 obstack_free (&optimistic_info->nary_obstack, NULL);
1920 gcc_obstack_init (&optimistic_info->nary_obstack);
1921 empty_alloc_pool (optimistic_info->phis_pool);
1922 empty_alloc_pool (optimistic_info->references_pool);
1923 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1924 changed |= visit_use (var);
1927 if (dump_file && (dump_flags & TDF_STATS))
/* NOTE(review): iterations is unsigned int; %u would be the
   matching format specifier.  */
1928 fprintf (dump_file, "Processing SCC required %d iterations\n",
1929 iterations);
1931 /* Finally, visit the SCC once using the valid table. */
1932 current_info = valid_info;
1933 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1934 visit_use (var);
1938 /* Depth first search on NAME to discover and process SCC's in the SSA
1939 graph.
1940 Execution of this algorithm relies on the fact that the SCC's are
1941 popped off the stack in topological order.
1942 Returns true if successful, false if we stopped processing SCC's due
1943 to resource constraints. */
/* This is Tarjan's SCC algorithm (dfsnum/low/on_sccstack) applied to
   the SSA use-def graph, recursing on uses of NAME's defining
   statement.  NOTE(review): the recursion depth is bounded only by
   the SSA dependency chains of the function being compiled.  */
1945 static bool
1946 DFS (tree name)
1948 ssa_op_iter iter;
1949 use_operand_p usep;
1950 tree defstmt;
1952 /* SCC info */
1953 VN_INFO (name)->dfsnum = next_dfs_num++;
1954 VN_INFO (name)->visited = true;
1955 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1957 VEC_safe_push (tree, heap, sccstack, name);
1958 VN_INFO (name)->on_sccstack = true;
1959 defstmt = SSA_NAME_DEF_STMT (name);
1961 /* Recursively DFS on our operands, looking for SCC's. */
1962 if (!IS_EMPTY_STMT (defstmt))
1964 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1965 SSA_OP_ALL_USES)
1967 tree use = USE_FROM_PTR (usep);
1969 /* Since we handle phi nodes, we will sometimes get
1970 invariants in the use expression. */
1971 if (TREE_CODE (use) != SSA_NAME)
1972 continue;
1974 if (! (VN_INFO (use)->visited))
1976 if (!DFS (use))
1977 return false;
1978 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1979 VN_INFO (use)->low)
1981 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1982 && VN_INFO (use)->on_sccstack)
1984 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1985 VN_INFO (name)->low);
1990 /* See if we found an SCC. */
/* NAME is an SCC root exactly when its low-link equals its own
   dfs number.  */
1991 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1993 VEC (tree, heap) *scc = NULL;
1994 tree x;
1996 /* Found an SCC, pop the components off the SCC stack and
1997 process them. */
2000 x = VEC_pop (tree, sccstack);
2002 VN_INFO (x)->on_sccstack = false;
2003 VEC_safe_push (tree, heap, scc, x);
2004 } while (x != name);
2006 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2007 if (VEC_length (tree, scc)
2008 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2010 if (dump_file)
2011 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2012 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2013 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* NOTE(review): the SCC vector is not freed on this early
   return -- looks like a small leak; confirm against the
   upstream history.  */
2014 return false;
2017 if (VEC_length (tree, scc) > 1)
2018 sort_scc (scc);
2020 if (dump_file && (dump_flags & TDF_DETAILS))
2021 print_scc (dump_file, scc);
2023 process_scc (scc);
2025 VEC_free (tree, heap, scc);
2028 return true;
2031 /* Allocate a value number table. */
2033 static void
2034 allocate_vn_table (vn_tables_t table)
2036 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2037 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2038 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2039 free_reference);
2041 gcc_obstack_init (&table->nary_obstack);
2042 table->phis_pool = create_alloc_pool ("VN phis",
2043 sizeof (struct vn_phi_s),
2044 30);
2045 table->references_pool = create_alloc_pool ("VN references",
2046 sizeof (struct vn_reference_s),
2047 30);
2050 /* Free a value number table. */
2052 static void
2053 free_vn_table (vn_tables_t table)
2055 htab_delete (table->phis);
2056 htab_delete (table->nary);
2057 htab_delete (table->references);
2058 obstack_free (&table->nary_obstack, NULL);
2059 free_alloc_pool (table->phis_pool);
2060 free_alloc_pool (table->references_pool);
/* Initialize the SCCVN pass: dominance info, per-SSA-name aux data,
   RPO numbering of blocks, statement uids, VN_TOP and the valid and
   optimistic value numbering tables.  */
2063 static void
2064 init_scc_vn (void)
2066 size_t i;
2067 int j;
2068 int *rpo_numbers_temp;
2069 basic_block bb;
2070 size_t id = 0;
2072 calculate_dominance_info (CDI_DOMINATORS);
2073 sccstack = NULL;
2074 next_dfs_num = 1;
2076 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2077 /* VEC_alloc doesn't actually grow it to the right size, it just
2078 preallocates the space to do so. */
2079 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2080 gcc_obstack_init (&vn_ssa_aux_obstack);
2082 shared_lookup_phiargs = NULL;
2083 shared_lookup_vops = NULL;
2084 shared_lookup_references = NULL;
2085 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2086 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2087 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2089 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2090 the i'th block in RPO order is bb. We want to map bb's to RPO
2091 numbers, so we need to rearrange this array. */
2092 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2093 rpo_numbers[rpo_numbers_temp[j]] = j;
2095 XDELETE (rpo_numbers_temp);
/* VN_TOP is a distinguished tree node meaning "value not yet
   computed"; lattice TOP.  */
2097 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2099 /* Create the VN_INFO structures, and initialize value numbers to
2100 TOP. */
2101 for (i = 0; i < num_ssa_names; i++)
2103 tree name = ssa_name (i);
2104 if (name)
2106 VN_INFO_GET (name)->valnum = VN_TOP;
2107 VN_INFO (name)->expr = name;
/* Assign sequential uids to statements; compare_ops relies on these
   for intra-block ordering.  */
2111 FOR_ALL_BB (bb)
2113 block_stmt_iterator bsi;
2114 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2116 tree stmt = bsi_stmt (bsi);
2117 stmt_ann (stmt)->uid = id++;
2121 /* Create the valid and optimistic value numbering tables. */
2122 valid_info = XCNEW (struct vn_tables_s);
2123 allocate_vn_table (valid_info);
2124 optimistic_info = XCNEW (struct vn_tables_s);
2125 allocate_vn_table (optimistic_info);
2126 pre_info = NULL;
2129 void
2130 switch_to_PRE_table (void)
2132 pre_info = XCNEW (struct vn_tables_s);
2133 allocate_vn_table (pre_info);
2134 current_info = pre_info;
/* Release all memory owned by the SCCVN pass: shared lookup scratch
   vectors, RPO numbers, per-name aux info (releasing any SSA names we
   inserted ourselves), and the value numbering tables.  */
2137 void
2138 free_scc_vn (void)
2140 size_t i;
2142 VEC_free (tree, heap, shared_lookup_phiargs);
2143 VEC_free (tree, gc, shared_lookup_vops);
2144 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2145 XDELETEVEC (rpo_numbers);
2147 for (i = 0; i < num_ssa_names; i++)
2149 tree name = ssa_name (i);
/* Clear stale value handles left on SSA names.  */
2150 if (name
2151 && SSA_NAME_VALUE (name)
2152 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2153 SSA_NAME_VALUE (name) = NULL;
/* Names created by visit_reference_op_load (needs_insertion) were
   never inserted into the IL; release them.  */
2154 if (name
2155 && VN_INFO (name)->needs_insertion)
2156 release_ssa_name (name);
2158 obstack_free (&vn_ssa_aux_obstack, NULL);
2159 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2161 VEC_free (tree, heap, sccstack);
2162 free_vn_table (valid_info);
2163 XDELETE (valid_info);
2164 free_vn_table (optimistic_info);
2165 XDELETE (optimistic_info);
/* pre_info only exists if switch_to_PRE_table was called.  */
2166 if (pre_info)
2168 free_vn_table (pre_info);
2169 XDELETE (pre_info);
2173 /* Do SCCVN. Returns true if it finished, false if we bailed out
2174 due to resource constraints. */
/* MAY_INSERT_ARG enables creation of new SSA names for expressions
   found during reference lookups (used when running for PRE).  On
   failure all pass memory is released before returning.  */
2176 bool
2177 run_scc_vn (bool may_insert_arg)
2179 size_t i;
2180 tree param;
2182 may_insert = may_insert_arg;
2184 init_scc_vn ();
2185 current_info = valid_info;
/* Default definitions of parameters are not defined by any statement;
   value number them to themselves up front.  */
2187 for (param = DECL_ARGUMENTS (current_function_decl);
2188 param;
2189 param = TREE_CHAIN (param))
2191 if (gimple_default_def (cfun, param) != NULL)
2193 tree def = gimple_default_def (cfun, param);
2194 SSA_VAL (def) = def;
/* Start a DFS from every not-yet-visited SSA name that has uses;
   SSA name 0 is never used, hence i starts at 1.  */
2198 for (i = 1; i < num_ssa_names; ++i)
2200 tree name = ssa_name (i);
2201 if (name
2202 && VN_INFO (name)->visited == false
2203 && !has_zero_uses (name))
2204 if (!DFS (name))
2206 free_scc_vn ();
2207 may_insert = false;
2208 return false;
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2214 fprintf (dump_file, "Value numbers:\n");
2215 for (i = 0; i < num_ssa_names; i++)
2217 tree name = ssa_name (i);
2218 if (name && VN_INFO (name)->visited
2219 && (SSA_VAL (name) != name
2220 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2222 print_generic_expr (dump_file, name, 0);
2223 fprintf (dump_file, " = ");
2224 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2225 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2226 else
2227 print_generic_expr (dump_file, SSA_VAL (name), 0);
2228 fprintf (dump_file, "\n");
2233 may_insert = false;
2234 return true;