/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
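
/* As a small illustration (hypothetical GIMPLE, not taken from the
   text above) of why the optimistic SCC iteration converges, consider
   the cycle

       i_1 = PHI <0 (preheader), i_2 (latch)>
       i_2 = i_1 + 0;

   On the first iteration i_2 still has the lattice value VN_TOP, so
   the PHI effectively sees only the argument 0 and i_1 is
   optimistically value numbered to 0; i_1 + 0 then folds to 0 for
   i_2 as well.  The next iteration sees the arguments {0, 0},
   confirming the assumption, and the whole SCC settles on the value 0
   without ever revisiting statements outside the cycle.  */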
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;
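
/* Whether value numbering may insert new expressions and SSA names;
   this is done during FRE (see the COMPONENT_REF handling in
   copy_reference_ops_from_ref and the insertion path in
   visit_reference_op_load).  */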
static bool may_insert;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	  || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	  || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
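
/* For example (hypothetical SSA names): if x_1 is value numbered to
   y_2 and y_2 is defined by y_2 = a_3 + b_4, then
   vn_get_expr_for (x_1) builds and caches the expression a_3 + b_4,
   giving simplification code something foldable to work with.  */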

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
				   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  tree v;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
  if (vr1->hashcode != vr2->hashcode)
    return false;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    {
      if (VEC_index (tree, vr2->vuses, i) != v)
	return false;
    }

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
			       vro))
	return false;
    }
  return true;
}

/* Place the vuses from STMT into *RESULT.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return;

  VEC_reserve_exact (tree, gc, *result,
		     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}

/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}

/* Place the vdefs from STMT into *RESULT.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!stmt)
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}

/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}

/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && TREE_OPERAND (ref, 2) == NULL_TREE
	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
	  else
	    {
	      /* Record field as operand.  */
	      temp.op0 = TREE_OPERAND (ref, 1);
	      temp.op1 = TREE_OPERAND (ref, 2);
	    }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.op2 = TREE_OPERAND (ref, 3);
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
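
/* For example, a load from a.b[i_1] (an ARRAY_REF of a COMPONENT_REF,
   hypothetical names) is decomposed outermost-first into roughly

     { opcode = ARRAY_REF,     op0 = i_1 }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for b }
     { opcode = VAR_DECL,      op0 = a }

   valueize_refs below later replaces i_1 with its value number, so
   two references that are equivalent up to value numbering end up
   with identical operand vectors.  */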

/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  return NULL_TREE;

	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case BIT_FIELD_REF:
	  *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case COMPONENT_REF:
	  *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  *op0_p = build4 (op->opcode, op->type, NULL_TREE,
			   op->op0, op->op1, op->op2);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  break;

	case ADDR_EXPR:
	  if (op->op0 != NULL_TREE)
	    {
	      gcc_assert (is_gimple_min_invariant (op->op0));
	      *op0_p = op->op0;
	      break;
	    }
	  /* Fallthrough.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	default:
	  return NULL_TREE;
	}
    }

  return ref;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      /* TODO: Do we want to valueize op2 and op1 of
	 ARRAY_REF/COMPONENT_REF for Ada */
    }

  return orig;
}

/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
	{
	  made_replacement = true;
	  VEC_replace (tree, orig, i, SSA_VAL (vuse));
	}
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}

/* Return the single reference statement defining all virtual uses
   in VUSES, or NULL if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
	 virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
	{
	  def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
	  goto cont;
	}
      else
	return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
	return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
cont:
  while (def_stmt
	 && is_gimple_assign (def_stmt)
	 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
	  && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
	  && is_gimple_assign (def_stmt))
	{
	  /* We are now at an aliasing definition for the vuses we want to
	     look up.  Re-do the lookup with the vdefs for this stmt.  */
	  vdefs_to_vec (def_stmt, &vuses);
	  vr1.vuses = valueize_vuses (vuses);
	  vr1.hashcode = vn_reference_compute_hash (&vr1);
	  result = vn_reference_lookup_1 (&vr1, vnresult);
	}
    }

  return result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
		     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
	 look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
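
/* One way the collision discussed above can arise (hypothetical
   names): the same store *p_1 = x_2 is value numbered more than once
   against the same memory state, so the second insertion computes
   the same {vdefs, operands} key and lands on the occupied slot; the
   stale entry is simply freed and replaced.  */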

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
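
/* Note that together with the equality function below, the operand
   swap above makes commutative operations single-valued: for example
   (hypothetical names) a_1 + b_2 and b_2 + a_1 are both brought into
   the same canonical operand order, so they produce the same hash
   code, compare equal, and therefore receive the same value number.  */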

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
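
/* For example (hypothetical names), two PHIs in the same block whose
   valueized argument vectors match element-wise, such as

     x_1 = PHI <a_2 (e1), b_3 (e2)>
     y_4 = PHI <a_2 (e1), b_3 (e2)>

   hash and compare equal, so the lookup of the second PHI finds the
   first and y_4 is value numbered to x_1 (see visit_phi).  */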

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
				     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
				NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	{
	  tree def = DEF_FROM_PTR (var);
	  tree use;

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	    use = def;
	  else
	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
	}
    }

  return changed;
}
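
/* A small example of the protocol above (hypothetical SSA names):

     # m_4 = VDEF <m_1>
     *p_2 = x_3;       <-- no match; m_4 is value numbered to itself
			   and the store is entered into the table
			   keyed on its vdefs {m_4}

     # m_5 = VDEF <m_4>
     *p_2 = x_3;       <-- lookup with the vuses {m_4} finds x_3, the
			   stored value matches, so m_5 is value
			   numbered to SSA_VAL (m_4)

   which makes the second store recognizably redundant.  */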

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
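  /* For example, if op0 is defined as a_2 + 1 and this statement
     computes op0 + 2, valueizing op0 to its defining expression lets
     fold_binary produce a_2 + 3.  (Illustrative example.)  */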
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
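      /* For instance, when op0 is itself defined by a conversion,
         handing that defining expression to fold may let a redundant
         conversion chain be combined into a single conversion.
         (Illustrative note.)  */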
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case; there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;

      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
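          /* E.g. for y_2 = x_1; we simply propagate x_1's value
             number to y_2 via visit_copy.  (Illustrative note.)  */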
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }

          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }

          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valueizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt),
                                                stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                      changed = visit_unary_op (lhs, stmt);
                      break;
                    case GIMPLE_BINARY_RHS:
                      changed = visit_binary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through the unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ???  We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ???  We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return 0;
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return 0;
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      return gimple_uid (opstmta) - gimple_uid (opstmtb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
         changing.  */
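      /* For example, for the cycle i_1 = PHI <0, i_2>; i_2 = i_1 + 0;
         the first optimistic iteration gives i_1 the value 0 (the
         not-yet-visited argument i_2 is still VN_TOP) and simplifies
         i_2 to 0, and a second iteration confirms the fixpoint.
         (Illustrative example.)  */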
      current_info = optimistic_info;
      while (changed)
        {
          changed = false;
          iterations++;
          /* As we are value-numbering optimistically we have to
             clear the expression tables and the simplified expressions
             in each iteration until we converge.  */
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
          obstack_free (&optimistic_info->nary_obstack, NULL);
          gcc_obstack_init (&optimistic_info->nary_obstack);
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            VN_INFO (var)->expr = NULL_TREE;
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        visit_use (var);
    }
}

DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we ran into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    }
  while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */
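/* Note this is an iterative formulation of Tarjan's SCC algorithm:
   instead of recursing on each use, the current walk state is pushed
   onto the explicit NAMEVEC/ITERVEC stacks, which keeps deeply nested
   SSA graphs from overflowing the call stack.  */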
static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push (tree, heap, namevec, name);
              name = use;
              goto start_over;
            }

        continue_walking:
          VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                     VN_INFO (use)->low);

          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size; it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO_NUMBERS_TEMP is an array of the RPO ordering: rpo[i] = bb means
     that the i'th block in RPO order is bb.  We want to map bb's to
     RPO numbers, so we need to rearrange this array.  */
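  /* E.g. if rpo_numbers_temp is {4, 7, 5, ...}, block 4 comes first in
     the RPO walk, so rpo_numbers[4] becomes 0, rpo_numbers[7] becomes
     1, and so on.  (Illustrative example.)  */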
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          SSA_VAL (def) = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            may_insert = false;
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
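  /* E.g. a name whose value leader is x_2 receives x_2's value id
     here; the loop repeats because an id may only become available
     on a later pass.  (Illustrative note.)  */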
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  may_insert = false;
  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1;
      tree lop2;
      for (lop1 = e1, lop2 = e2;
           lop1 || lop2;
           lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
        {
          if (!lop1 || !lop2)
            return false;
          if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
            return false;
        }
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses (VEC (tree,gc) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}

/* Likewise, but for a heap-allocated VUSE array.  */

void
sort_vuses_heap (VEC (tree,heap) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
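/* For example, a division whose second operand is not known to be
   nonzero, or a signed addition with -ftrapv in effect, counts as
   possibly trapping.  (Illustrative note.)  */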
bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
      rhs2 = nary->op[1];
      ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                           honor_trapv,
                                           honor_nans, honor_snans, rhs2,
                                           &handled);
      if (handled
          && ret)
        return true;
    }

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}