gcc/
[official-gcc/alias-decl.git] / gcc / tree-ssa-sccvn.c
blobb101aebd9e488afd932d55f616e02503f4148273
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
110 htab_t nary;
111 htab_t phis;
112 htab_t references;
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
116 } *vn_tables_t;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
123 correct. */
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
134 valid_info. */
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
146 value. */
148 tree VN_TOP;
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
155 detection. */
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name. */
175 vn_ssa_aux_t
176 VN_INFO (tree name)
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
180 gcc_assert (res);
181 return res;
184 /* Set the value numbering info for a given SSA name to a given
185 value. */
187 static inline void
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
197 vn_ssa_aux_t
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
209 return newinfo;
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
216 tree
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
220 gimple def_stmt;
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
224 return name;
226 /* If the value-number is a constant it is the representative
227 expression. */
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
229 return vn->valnum;
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
235 expression. */
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
237 return vn->valnum;
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
241 return vn->expr;
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
247 use it directly. */
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
250 return vn->valnum;
252 if (!is_gimple_assign (def_stmt))
253 return vn->valnum;
255 /* FIXME tuples. This is incomplete and likely will miss some
256 simplifications. */
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
259 case tcc_reference:
260 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
264 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
265 gimple_expr_type (def_stmt),
266 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
267 break;
269 case tcc_unary:
270 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
271 gimple_expr_type (def_stmt),
272 gimple_assign_rhs1 (def_stmt));
273 break;
275 case tcc_binary:
276 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
277 gimple_expr_type (def_stmt),
278 gimple_assign_rhs1 (def_stmt),
279 gimple_assign_rhs2 (def_stmt));
280 break;
282 default:;
284 if (expr == NULL_TREE)
285 return vn->valnum;
287 /* Cache the expression. */
288 vn->expr = expr;
290 return expr;
294 /* Free a phi operation structure VP. */
296 static void
297 free_phi (void *vp)
299 vn_phi_t phi = (vn_phi_t) vp;
300 VEC_free (tree, heap, phi->phiargs);
303 /* Free a reference operation structure VP. */
305 static void
306 free_reference (void *vp)
308 vn_reference_t vr = (vn_reference_t) vp;
309 VEC_free (vn_reference_op_s, heap, vr->operands);
312 /* Hash table equality function for vn_constant_t. */
314 static int
315 vn_constant_eq (const void *p1, const void *p2)
317 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
318 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
320 if (vc1->hashcode != vc2->hashcode)
321 return false;
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
326 /* Hash table hash function for vn_constant_t. */
328 static hashval_t
329 vn_constant_hash (const void *p1)
331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332 return vc1->hashcode;
335 /* Lookup a value id for CONSTANT and return it. If it does not
336 exist returns 0. */
338 unsigned int
339 get_constant_value_id (tree constant)
341 void **slot;
342 struct vn_constant_s vc;
344 vc.hashcode = vn_hash_constant_with_type (constant);
345 vc.constant = constant;
346 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
347 vc.hashcode, NO_INSERT);
348 if (slot)
349 return ((vn_constant_t)*slot)->value_id;
350 return 0;
353 /* Lookup a value id for CONSTANT, and if it does not exist, create a
354 new one and return it. If it does exist, return it. */
356 unsigned int
357 get_or_alloc_constant_value_id (tree constant)
359 void **slot;
360 vn_constant_t vc = XNEW (struct vn_constant_s);
362 vc->hashcode = vn_hash_constant_with_type (constant);
363 vc->constant = constant;
364 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
365 vc->hashcode, INSERT);
366 if (*slot)
368 free (vc);
369 return ((vn_constant_t)*slot)->value_id;
371 vc->value_id = get_next_value_id ();
372 *slot = vc;
373 bitmap_set_bit (constant_value_ids, vc->value_id);
374 return vc->value_id;
377 /* Return true if V is a value id for a constant. */
379 bool
380 value_id_constant_p (unsigned int v)
382 return bitmap_bit_p (constant_value_ids, v);
385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */
388 static int
389 vn_reference_op_eq (const void *p1, const void *p2)
391 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
392 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
394 return vro1->opcode == vro2->opcode
395 && types_compatible_p (vro1->type, vro2->type)
396 && expressions_equal_p (vro1->op0, vro2->op0)
397 && expressions_equal_p (vro1->op1, vro2->op1)
398 && expressions_equal_p (vro1->op2, vro2->op2);
401 /* Compute the hash for a reference operand VRO1. */
403 static hashval_t
404 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
406 hashval_t result = 0;
407 if (vro1->op0)
408 result += iterative_hash_expr (vro1->op0, vro1->opcode);
409 if (vro1->op1)
410 result += iterative_hash_expr (vro1->op1, vro1->opcode);
411 if (vro1->op2)
412 result += iterative_hash_expr (vro1->op2, vro1->opcode);
413 return result;
416 /* Return the hashcode for a given reference operation P1. */
418 static hashval_t
419 vn_reference_hash (const void *p1)
421 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
422 return vr1->hashcode;
425 /* Compute a hash for the reference operation VR1 and return it. */
427 hashval_t
428 vn_reference_compute_hash (const vn_reference_t vr1)
430 hashval_t result;
431 int i;
432 vn_reference_op_t vro;
434 result = iterative_hash_expr (vr1->vuse, 0);
435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
436 result += vn_reference_op_compute_hash (vro);
438 return result;
441 /* Return true if reference operations P1 and P2 are equivalent. This
442 means they have the same set of operands and vuses. */
445 vn_reference_eq (const void *p1, const void *p2)
447 int i;
448 vn_reference_op_t vro;
450 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
451 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
452 if (vr1->hashcode != vr2->hashcode)
453 return false;
455 /* Early out if this is not a hash collision. */
456 if (vr1->hashcode != vr2->hashcode)
457 return false;
459 /* The VOP needs to be the same. */
460 if (vr1->vuse != vr2->vuse)
461 return false;
463 /* If the operands are the same we are done. */
464 if (vr1->operands == vr2->operands)
465 return true;
467 /* We require that address operands be canonicalized in a way that
468 two memory references will have the same operands if they are
469 equivalent. */
470 if (VEC_length (vn_reference_op_s, vr1->operands)
471 != VEC_length (vn_reference_op_s, vr2->operands))
472 return false;
474 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
475 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
476 vro))
477 return false;
479 return true;
482 /* Copy the operations present in load/store REF into RESULT, a vector of
483 vn_reference_op_s's. */
485 void
486 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
488 if (TREE_CODE (ref) == TARGET_MEM_REF)
490 vn_reference_op_s temp;
492 memset (&temp, 0, sizeof (temp));
493 /* We do not care for spurious type qualifications. */
494 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
495 temp.opcode = TREE_CODE (ref);
496 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
497 temp.op1 = TMR_INDEX (ref);
498 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
500 memset (&temp, 0, sizeof (temp));
501 temp.type = NULL_TREE;
502 temp.opcode = TREE_CODE (ref);
503 temp.op0 = TMR_STEP (ref);
504 temp.op1 = TMR_OFFSET (ref);
505 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
506 return;
509 /* For non-calls, store the information that makes up the address. */
511 while (ref)
513 vn_reference_op_s temp;
515 memset (&temp, 0, sizeof (temp));
516 /* We do not care for spurious type qualifications. */
517 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
518 temp.opcode = TREE_CODE (ref);
520 switch (temp.opcode)
522 case ALIGN_INDIRECT_REF:
523 case INDIRECT_REF:
524 /* The only operand is the address, which gets its own
525 vn_reference_op_s structure. */
526 break;
527 case MISALIGNED_INDIRECT_REF:
528 temp.op0 = TREE_OPERAND (ref, 1);
529 break;
530 case BIT_FIELD_REF:
531 /* Record bits and position. */
532 temp.op0 = TREE_OPERAND (ref, 1);
533 temp.op1 = TREE_OPERAND (ref, 2);
534 break;
535 case COMPONENT_REF:
536 /* The field decl is enough to unambiguously specify the field,
537 a matching type is not necessary and a mismatching type
538 is always a spurious difference. */
539 temp.type = NULL_TREE;
540 /* If this is a reference to a union member, record the union
541 member size as operand. Do so only if we are doing
542 expression insertion (during FRE), as PRE currently gets
543 confused with this. */
544 if (may_insert
545 && TREE_OPERAND (ref, 2) == NULL_TREE
546 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
547 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
548 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
549 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
550 else
552 /* Record field as operand. */
553 temp.op0 = TREE_OPERAND (ref, 1);
554 temp.op1 = TREE_OPERAND (ref, 2);
556 break;
557 case ARRAY_RANGE_REF:
558 case ARRAY_REF:
559 /* Record index as operand. */
560 temp.op0 = TREE_OPERAND (ref, 1);
561 temp.op1 = TREE_OPERAND (ref, 2);
562 temp.op2 = TREE_OPERAND (ref, 3);
563 break;
564 case STRING_CST:
565 case INTEGER_CST:
566 case COMPLEX_CST:
567 case VECTOR_CST:
568 case REAL_CST:
569 case CONSTRUCTOR:
570 case VAR_DECL:
571 case PARM_DECL:
572 case CONST_DECL:
573 case RESULT_DECL:
574 case SSA_NAME:
575 case EXC_PTR_EXPR:
576 case FILTER_EXPR:
577 temp.op0 = ref;
578 break;
579 case ADDR_EXPR:
580 if (is_gimple_min_invariant (ref))
582 temp.op0 = ref;
583 break;
585 /* Fallthrough. */
586 /* These are only interesting for their operands, their
587 existence, and their type. They will never be the last
588 ref in the chain of references (IE they require an
589 operand), so we don't have to put anything
590 for op* as it will be handled by the iteration */
591 case IMAGPART_EXPR:
592 case REALPART_EXPR:
593 case VIEW_CONVERT_EXPR:
594 break;
595 default:
596 gcc_unreachable ();
598 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
600 if (REFERENCE_CLASS_P (ref)
601 || (TREE_CODE (ref) == ADDR_EXPR
602 && !is_gimple_min_invariant (ref)))
603 ref = TREE_OPERAND (ref, 0);
604 else
605 ref = NULL_TREE;
609 /* Re-create a reference tree from the reference ops OPS.
610 Returns NULL_TREE if the ops were not handled.
611 This routine needs to be kept in sync with copy_reference_ops_from_ref. */
613 tree
614 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
616 vn_reference_op_t op;
617 unsigned i;
618 tree ref, *op0_p = &ref;
620 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
622 switch (op->opcode)
624 case CALL_EXPR:
625 return NULL_TREE;
627 case ALIGN_INDIRECT_REF:
628 case INDIRECT_REF:
629 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
630 op0_p = &TREE_OPERAND (*op0_p, 0);
631 break;
633 case MISALIGNED_INDIRECT_REF:
634 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
635 NULL_TREE, op->op0);
636 op0_p = &TREE_OPERAND (*op0_p, 0);
637 break;
639 case BIT_FIELD_REF:
640 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
641 op->op0, op->op1);
642 op0_p = &TREE_OPERAND (*op0_p, 0);
643 break;
645 case COMPONENT_REF:
646 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
647 op->op0, op->op1);
648 op0_p = &TREE_OPERAND (*op0_p, 0);
649 break;
651 case ARRAY_RANGE_REF:
652 case ARRAY_REF:
653 *op0_p = build4 (op->opcode, op->type, NULL_TREE,
654 op->op0, op->op1, op->op2);
655 op0_p = &TREE_OPERAND (*op0_p, 0);
656 break;
658 case STRING_CST:
659 case INTEGER_CST:
660 case COMPLEX_CST:
661 case VECTOR_CST:
662 case REAL_CST:
663 case CONSTRUCTOR:
664 case VAR_DECL:
665 case PARM_DECL:
666 case CONST_DECL:
667 case RESULT_DECL:
668 case SSA_NAME:
669 case FILTER_EXPR:
670 case EXC_PTR_EXPR:
671 *op0_p = op->op0;
672 break;
674 case ADDR_EXPR:
675 if (op->op0 != NULL_TREE)
677 gcc_assert (is_gimple_min_invariant (op->op0));
678 *op0_p = op->op0;
679 break;
681 /* Fallthrough. */
682 case IMAGPART_EXPR:
683 case REALPART_EXPR:
684 case VIEW_CONVERT_EXPR:
685 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
686 op0_p = &TREE_OPERAND (*op0_p, 0);
687 break;
689 default:
690 return NULL_TREE;
694 return ref;
697 /* Copy the operations present in load/store/call REF into RESULT, a vector of
698 vn_reference_op_s's. */
700 void
701 copy_reference_ops_from_call (gimple call,
702 VEC(vn_reference_op_s, heap) **result)
704 vn_reference_op_s temp;
705 unsigned i;
707 /* Copy the type, opcode, function being called and static chain. */
708 memset (&temp, 0, sizeof (temp));
709 temp.type = gimple_call_return_type (call);
710 temp.opcode = CALL_EXPR;
711 temp.op0 = gimple_call_fn (call);
712 temp.op1 = gimple_call_chain (call);
713 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
715 /* Copy the call arguments. As they can be references as well,
716 just chain them together. */
717 for (i = 0; i < gimple_call_num_args (call); ++i)
719 tree callarg = gimple_call_arg (call, i);
720 copy_reference_ops_from_ref (callarg, result);
724 /* Create a vector of vn_reference_op_s structures from REF, a
725 REFERENCE_CLASS_P tree. The vector is not shared. */
727 static VEC(vn_reference_op_s, heap) *
728 create_reference_ops_from_ref (tree ref)
730 VEC (vn_reference_op_s, heap) *result = NULL;
732 copy_reference_ops_from_ref (ref, &result);
733 return result;
736 /* Create a vector of vn_reference_op_s structures from CALL, a
737 call statement. The vector is not shared. */
739 static VEC(vn_reference_op_s, heap) *
740 create_reference_ops_from_call (gimple call)
742 VEC (vn_reference_op_s, heap) *result = NULL;
744 copy_reference_ops_from_call (call, &result);
745 return result;
748 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
749 *I_P to point to the last element of the replacement. */
750 void
751 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
752 unsigned int *i_p)
754 VEC(vn_reference_op_s, heap) *mem = NULL;
755 vn_reference_op_t op;
756 unsigned int i = *i_p;
757 unsigned int j;
759 /* Get ops for the addressed object. */
760 op = VEC_index (vn_reference_op_s, *ops, i);
761 /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
762 around it to avoid later ICEs. */
763 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
764 && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
766 vn_reference_op_s aref;
767 tree dom;
768 aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
769 aref.opcode = ARRAY_REF;
770 aref.op0 = integer_zero_node;
771 if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
772 && TYPE_MIN_VALUE (dom))
773 aref.op0 = TYPE_MIN_VALUE (dom);
774 aref.op1 = NULL_TREE;
775 aref.op2 = NULL_TREE;
776 VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
778 copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
780 /* Do the replacement - we should have at least one op in mem now. */
781 if (VEC_length (vn_reference_op_s, mem) == 1)
783 VEC_replace (vn_reference_op_s, *ops, i - 1,
784 VEC_index (vn_reference_op_s, mem, 0));
785 VEC_ordered_remove (vn_reference_op_s, *ops, i);
786 i--;
788 else if (VEC_length (vn_reference_op_s, mem) == 2)
790 VEC_replace (vn_reference_op_s, *ops, i - 1,
791 VEC_index (vn_reference_op_s, mem, 0));
792 VEC_replace (vn_reference_op_s, *ops, i,
793 VEC_index (vn_reference_op_s, mem, 1));
795 else if (VEC_length (vn_reference_op_s, mem) > 2)
797 VEC_replace (vn_reference_op_s, *ops, i - 1,
798 VEC_index (vn_reference_op_s, mem, 0));
799 VEC_replace (vn_reference_op_s, *ops, i,
800 VEC_index (vn_reference_op_s, mem, 1));
801 /* ??? There is no VEC_splice. */
802 for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
803 VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
805 else
806 gcc_unreachable ();
808 VEC_free (vn_reference_op_s, heap, mem);
809 *i_p = i;
812 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
813 structures into their value numbers. This is done in-place, and
814 the vector passed in is returned. */
816 static VEC (vn_reference_op_s, heap) *
817 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
819 vn_reference_op_t vro;
820 unsigned int i;
822 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
824 if (vro->opcode == SSA_NAME
825 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
827 vro->op0 = SSA_VAL (vro->op0);
828 /* If it transforms from an SSA_NAME to a constant, update
829 the opcode. */
830 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
831 vro->opcode = TREE_CODE (vro->op0);
832 /* If it transforms from an SSA_NAME to an address, fold with
833 a preceding indirect reference. */
834 if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
835 && VEC_index (vn_reference_op_s,
836 orig, i - 1)->opcode == INDIRECT_REF)
838 vn_reference_fold_indirect (&orig, &i);
839 continue;
842 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
843 vro->op1 = SSA_VAL (vro->op1);
844 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
845 vro->op2 = SSA_VAL (vro->op2);
848 return orig;
851 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
853 /* Create a vector of vn_reference_op_s structures from REF, a
854 REFERENCE_CLASS_P tree. The vector is shared among all callers of
855 this function. */
857 static VEC(vn_reference_op_s, heap) *
858 valueize_shared_reference_ops_from_ref (tree ref)
860 if (!ref)
861 return NULL;
862 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
863 copy_reference_ops_from_ref (ref, &shared_lookup_references);
864 shared_lookup_references = valueize_refs (shared_lookup_references);
865 return shared_lookup_references;
868 /* Create a vector of vn_reference_op_s structures from CALL, a
869 call statement. The vector is shared among all callers of
870 this function. */
872 static VEC(vn_reference_op_s, heap) *
873 valueize_shared_reference_ops_from_call (gimple call)
875 if (!call)
876 return NULL;
877 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
878 copy_reference_ops_from_call (call, &shared_lookup_references);
879 shared_lookup_references = valueize_refs (shared_lookup_references);
880 return shared_lookup_references;
883 /* Lookup a SCCVN reference operation VR in the current hash table.
884 Returns the resulting value number if it exists in the hash table,
885 NULL_TREE otherwise. VNRESULT will be filled in with the actual
886 vn_reference_t stored in the hashtable if something is found. */
888 static tree
889 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
891 void **slot;
892 hashval_t hash;
894 hash = vr->hashcode;
895 slot = htab_find_slot_with_hash (current_info->references, vr,
896 hash, NO_INSERT);
897 if (!slot && current_info == optimistic_info)
898 slot = htab_find_slot_with_hash (valid_info->references, vr,
899 hash, NO_INSERT);
900 if (slot)
902 if (vnresult)
903 *vnresult = (vn_reference_t)*slot;
904 return ((vn_reference_t)*slot)->result;
907 return NULL_TREE;
910 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
911 with the current VUSE and performs the expression lookup. */
913 static void *
914 vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
916 vn_reference_t vr = (vn_reference_t)vr_;
917 void **slot;
918 hashval_t hash;
920 /* Fixup vuse and hash. */
921 vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
922 vr->vuse = SSA_VAL (vuse);
923 vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
925 hash = vr->hashcode;
926 slot = htab_find_slot_with_hash (current_info->references, vr,
927 hash, NO_INSERT);
928 if (!slot && current_info == optimistic_info)
929 slot = htab_find_slot_with_hash (valid_info->references, vr,
930 hash, NO_INSERT);
931 if (slot)
932 return *slot;
934 return NULL;
937 /* Lookup a reference operation by it's parts, in the current hash table.
938 Returns the resulting value number if it exists in the hash table,
939 NULL_TREE otherwise. VNRESULT will be filled in with the actual
940 vn_reference_t stored in the hashtable if something is found. */
942 tree
943 vn_reference_lookup_pieces (tree vuse,
944 VEC (vn_reference_op_s, heap) *operands,
945 vn_reference_t *vnresult, bool maywalk)
947 struct vn_reference_s vr1;
948 vn_reference_t tmp;
950 if (!vnresult)
951 vnresult = &tmp;
952 *vnresult = NULL;
954 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
955 vr1.operands = valueize_refs (operands);
956 vr1.hashcode = vn_reference_compute_hash (&vr1);
957 vn_reference_lookup_1 (&vr1, vnresult);
959 if (!*vnresult
960 && maywalk
961 && vr1.vuse)
963 tree ref = get_ref_from_reference_ops (operands);
964 if (!ref)
965 return NULL_TREE;
966 *vnresult =
967 (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
968 vn_reference_lookup_2, &vr1);
971 if (*vnresult)
972 return (*vnresult)->result;
974 return NULL_TREE;
977 /* Lookup OP in the current hash table, and return the resulting value
978 number if it exists in the hash table. Return NULL_TREE if it does
979 not exist in the hash table or if the result field of the structure
980 was NULL.. VNRESULT will be filled in with the vn_reference_t
981 stored in the hashtable if one exists. */
983 tree
984 vn_reference_lookup (tree op, tree vuse, bool maywalk,
985 vn_reference_t *vnresult)
987 struct vn_reference_s vr1;
989 if (vnresult)
990 *vnresult = NULL;
992 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
993 vr1.operands = valueize_shared_reference_ops_from_ref (op);
994 vr1.hashcode = vn_reference_compute_hash (&vr1);
996 if (maywalk
997 && vr1.vuse)
999 vn_reference_t wvnresult;
1000 wvnresult =
1001 (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
1002 vn_reference_lookup_2, &vr1);
1003 if (wvnresult)
1005 if (vnresult)
1006 *vnresult = wvnresult;
1007 return wvnresult->result;
1010 return NULL_TREE;
1013 return vn_reference_lookup_1 (&vr1, vnresult);
1017 /* Insert OP into the current hash table with a value number of
1018 RESULT, and return the resulting reference structure we created. */
1020 vn_reference_t
1021 vn_reference_insert (tree op, tree result, tree vuse)
1023 void **slot;
1024 vn_reference_t vr1;
1026 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1027 if (TREE_CODE (result) == SSA_NAME)
1028 vr1->value_id = VN_INFO (result)->value_id;
1029 else
1030 vr1->value_id = get_or_alloc_constant_value_id (result);
1031 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1032 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1033 vr1->hashcode = vn_reference_compute_hash (vr1);
1034 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1036 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1037 INSERT);
1039 /* Because we lookup stores using vuses, and value number failures
1040 using the vdefs (see visit_reference_op_store for how and why),
1041 it's possible that on failure we may try to insert an already
1042 inserted store. This is not wrong, there is no ssa name for a
1043 store that we could use as a differentiator anyway. Thus, unlike
1044 the other lookup functions, you cannot gcc_assert (!*slot)
1045 here. */
1047 /* But free the old slot in case of a collision. */
1048 if (*slot)
1049 free_reference (*slot);
1051 *slot = vr1;
1052 return vr1;
1055 /* Insert a reference by it's pieces into the current hash table with
1056 a value number of RESULT. Return the resulting reference
1057 structure we created. */
1059 vn_reference_t
1060 vn_reference_insert_pieces (tree vuse,
1061 VEC (vn_reference_op_s, heap) *operands,
1062 tree result, unsigned int value_id)
1065 void **slot;
1066 vn_reference_t vr1;
1068 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1069 vr1->value_id = value_id;
1070 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1071 vr1->operands = valueize_refs (operands);
1072 vr1->hashcode = vn_reference_compute_hash (vr1);
1073 if (result && TREE_CODE (result) == SSA_NAME)
1074 result = SSA_VAL (result);
1075 vr1->result = result;
1077 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1078 INSERT);
1080 /* At this point we should have all the things inserted that we have
1081 seen before, and we should never try inserting something that
1082 already exists. */
1083 gcc_assert (!*slot);
1084 if (*slot)
1085 free_reference (*slot);
1087 *slot = vr1;
1088 return vr1;
1091 /* Compute and return the hash value for nary operation VBO1. */
1093 inline hashval_t
1094 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1096 hashval_t hash = 0;
1097 unsigned i;
1099 for (i = 0; i < vno1->length; ++i)
1100 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1101 vno1->op[i] = SSA_VAL (vno1->op[i]);
1103 if (vno1->length == 2
1104 && commutative_tree_code (vno1->opcode)
1105 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1107 tree temp = vno1->op[0];
1108 vno1->op[0] = vno1->op[1];
1109 vno1->op[1] = temp;
1112 for (i = 0; i < vno1->length; ++i)
1113 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1115 return hash;
1118 /* Return the computed hashcode for nary operation P1. */
1120 static hashval_t
1121 vn_nary_op_hash (const void *p1)
1123 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1124 return vno1->hashcode;
1127 /* Compare nary operations P1 and P2 and return true if they are
1128 equivalent. */
1131 vn_nary_op_eq (const void *p1, const void *p2)
1133 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1134 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1135 unsigned i;
1137 if (vno1->hashcode != vno2->hashcode)
1138 return false;
1140 if (vno1->opcode != vno2->opcode
1141 || !types_compatible_p (vno1->type, vno2->type))
1142 return false;
1144 for (i = 0; i < vno1->length; ++i)
1145 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1146 return false;
1148 return true;
1151 /* Lookup a n-ary operation by its pieces and return the resulting value
1152 number if it exists in the hash table. Return NULL_TREE if it does
1153 not exist in the hash table or if the result field of the operation
1154 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1155 if it exists. */
1157 tree
1158 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1159 tree type, tree op0, tree op1, tree op2,
1160 tree op3, vn_nary_op_t *vnresult)
1162 void **slot;
1163 struct vn_nary_op_s vno1;
1164 if (vnresult)
1165 *vnresult = NULL;
1166 vno1.opcode = code;
1167 vno1.length = length;
1168 vno1.type = type;
1169 vno1.op[0] = op0;
1170 vno1.op[1] = op1;
1171 vno1.op[2] = op2;
1172 vno1.op[3] = op3;
1173 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1174 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1175 NO_INSERT);
1176 if (!slot && current_info == optimistic_info)
1177 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1178 NO_INSERT);
1179 if (!slot)
1180 return NULL_TREE;
1181 if (vnresult)
1182 *vnresult = (vn_nary_op_t)*slot;
1183 return ((vn_nary_op_t)*slot)->result;
1186 /* Lookup OP in the current hash table, and return the resulting value
1187 number if it exists in the hash table. Return NULL_TREE if it does
1188 not exist in the hash table or if the result field of the operation
1189 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1190 if it exists. */
1192 tree
1193 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1195 void **slot;
1196 struct vn_nary_op_s vno1;
1197 unsigned i;
1199 if (vnresult)
1200 *vnresult = NULL;
1201 vno1.opcode = TREE_CODE (op);
1202 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1203 vno1.type = TREE_TYPE (op);
1204 for (i = 0; i < vno1.length; ++i)
1205 vno1.op[i] = TREE_OPERAND (op, i);
1206 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1207 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1208 NO_INSERT);
1209 if (!slot && current_info == optimistic_info)
1210 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1211 NO_INSERT);
1212 if (!slot)
1213 return NULL_TREE;
1214 if (vnresult)
1215 *vnresult = (vn_nary_op_t)*slot;
1216 return ((vn_nary_op_t)*slot)->result;
1219 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1220 value number if it exists in the hash table. Return NULL_TREE if
1221 it does not exist in the hash table. VNRESULT will contain the
1222 vn_nary_op_t from the hashtable if it exists. */
1224 tree
1225 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1227 void **slot;
1228 struct vn_nary_op_s vno1;
1229 unsigned i;
1231 if (vnresult)
1232 *vnresult = NULL;
1233 vno1.opcode = gimple_assign_rhs_code (stmt);
1234 vno1.length = gimple_num_ops (stmt) - 1;
1235 vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
1236 for (i = 0; i < vno1.length; ++i)
1237 vno1.op[i] = gimple_op (stmt, i + 1);
1238 if (vno1.opcode == REALPART_EXPR
1239 || vno1.opcode == IMAGPART_EXPR
1240 || vno1.opcode == VIEW_CONVERT_EXPR)
1241 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1242 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1243 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1244 NO_INSERT);
1245 if (!slot && current_info == optimistic_info)
1246 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1247 NO_INSERT);
1248 if (!slot)
1249 return NULL_TREE;
1250 if (vnresult)
1251 *vnresult = (vn_nary_op_t)*slot;
1252 return ((vn_nary_op_t)*slot)->result;
1255 /* Insert a n-ary operation into the current hash table using it's
1256 pieces. Return the vn_nary_op_t structure we created and put in
1257 the hashtable. */
1259 vn_nary_op_t
1260 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1261 tree type, tree op0,
1262 tree op1, tree op2, tree op3,
1263 tree result,
1264 unsigned int value_id)
1266 void **slot;
1267 vn_nary_op_t vno1;
1269 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1270 (sizeof (struct vn_nary_op_s)
1271 - sizeof (tree) * (4 - length)));
1272 vno1->value_id = value_id;
1273 vno1->opcode = code;
1274 vno1->length = length;
1275 vno1->type = type;
1276 if (length >= 1)
1277 vno1->op[0] = op0;
1278 if (length >= 2)
1279 vno1->op[1] = op1;
1280 if (length >= 3)
1281 vno1->op[2] = op2;
1282 if (length >= 4)
1283 vno1->op[3] = op3;
1284 vno1->result = result;
1285 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1286 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1287 INSERT);
1288 gcc_assert (!*slot);
1290 *slot = vno1;
1291 return vno1;
1295 /* Insert OP into the current hash table with a value number of
1296 RESULT. Return the vn_nary_op_t structure we created and put in
1297 the hashtable. */
1299 vn_nary_op_t
1300 vn_nary_op_insert (tree op, tree result)
1302 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1303 void **slot;
1304 vn_nary_op_t vno1;
1305 unsigned i;
1307 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1308 (sizeof (struct vn_nary_op_s)
1309 - sizeof (tree) * (4 - length)));
1310 vno1->value_id = VN_INFO (result)->value_id;
1311 vno1->opcode = TREE_CODE (op);
1312 vno1->length = length;
1313 vno1->type = TREE_TYPE (op);
1314 for (i = 0; i < vno1->length; ++i)
1315 vno1->op[i] = TREE_OPERAND (op, i);
1316 vno1->result = result;
1317 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1318 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1319 INSERT);
1320 gcc_assert (!*slot);
1322 *slot = vno1;
1323 return vno1;
1326 /* Insert the rhs of STMT into the current hash table with a value number of
1327 RESULT. */
1329 vn_nary_op_t
1330 vn_nary_op_insert_stmt (gimple stmt, tree result)
1332 unsigned length = gimple_num_ops (stmt) - 1;
1333 void **slot;
1334 vn_nary_op_t vno1;
1335 unsigned i;
1337 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1338 (sizeof (struct vn_nary_op_s)
1339 - sizeof (tree) * (4 - length)));
1340 vno1->value_id = VN_INFO (result)->value_id;
1341 vno1->opcode = gimple_assign_rhs_code (stmt);
1342 vno1->length = length;
1343 vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
1344 for (i = 0; i < vno1->length; ++i)
1345 vno1->op[i] = gimple_op (stmt, i + 1);
1346 if (vno1->opcode == REALPART_EXPR
1347 || vno1->opcode == IMAGPART_EXPR
1348 || vno1->opcode == VIEW_CONVERT_EXPR)
1349 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1350 vno1->result = result;
1351 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1352 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1353 INSERT);
1354 gcc_assert (!*slot);
1356 *slot = vno1;
1357 return vno1;
1360 /* Compute a hashcode for PHI operation VP1 and return it. */
1362 static inline hashval_t
1363 vn_phi_compute_hash (vn_phi_t vp1)
1365 hashval_t result = 0;
1366 int i;
1367 tree phi1op;
1368 tree type;
1370 result = vp1->block->index;
1372 /* If all PHI arguments are constants we need to distinguish
1373 the PHI node via its type. */
1374 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1375 result += (INTEGRAL_TYPE_P (type)
1376 + (INTEGRAL_TYPE_P (type)
1377 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1379 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1381 if (phi1op == VN_TOP)
1382 continue;
1383 result += iterative_hash_expr (phi1op, result);
1386 return result;
1389 /* Return the computed hashcode for phi operation P1. */
1391 static hashval_t
1392 vn_phi_hash (const void *p1)
1394 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1395 return vp1->hashcode;
1398 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1400 static int
1401 vn_phi_eq (const void *p1, const void *p2)
1403 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1404 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1406 if (vp1->hashcode != vp2->hashcode)
1407 return false;
1409 if (vp1->block == vp2->block)
1411 int i;
1412 tree phi1op;
1414 /* If the PHI nodes do not have compatible types
1415 they are not the same. */
1416 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1417 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1418 return false;
1420 /* Any phi in the same block will have it's arguments in the
1421 same edge order, because of how we store phi nodes. */
1422 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1424 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1425 if (phi1op == VN_TOP || phi2op == VN_TOP)
1426 continue;
1427 if (!expressions_equal_p (phi1op, phi2op))
1428 return false;
1430 return true;
1432 return false;
1435 static VEC(tree, heap) *shared_lookup_phiargs;
1437 /* Lookup PHI in the current hash table, and return the resulting
1438 value number if it exists in the hash table. Return NULL_TREE if
1439 it does not exist in the hash table. */
1441 static tree
1442 vn_phi_lookup (gimple phi)
1444 void **slot;
1445 struct vn_phi_s vp1;
1446 unsigned i;
1448 VEC_truncate (tree, shared_lookup_phiargs, 0);
1450 /* Canonicalize the SSA_NAME's to their value number. */
1451 for (i = 0; i < gimple_phi_num_args (phi); i++)
1453 tree def = PHI_ARG_DEF (phi, i);
1454 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1455 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1457 vp1.phiargs = shared_lookup_phiargs;
1458 vp1.block = gimple_bb (phi);
1459 vp1.hashcode = vn_phi_compute_hash (&vp1);
1460 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1461 NO_INSERT);
1462 if (!slot && current_info == optimistic_info)
1463 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1464 NO_INSERT);
1465 if (!slot)
1466 return NULL_TREE;
1467 return ((vn_phi_t)*slot)->result;
1470 /* Insert PHI into the current hash table with a value number of
1471 RESULT. */
1473 static vn_phi_t
1474 vn_phi_insert (gimple phi, tree result)
1476 void **slot;
1477 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1478 unsigned i;
1479 VEC (tree, heap) *args = NULL;
1481 /* Canonicalize the SSA_NAME's to their value number. */
1482 for (i = 0; i < gimple_phi_num_args (phi); i++)
1484 tree def = PHI_ARG_DEF (phi, i);
1485 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1486 VEC_safe_push (tree, heap, args, def);
1488 vp1->value_id = VN_INFO (result)->value_id;
1489 vp1->phiargs = args;
1490 vp1->block = gimple_bb (phi);
1491 vp1->result = result;
1492 vp1->hashcode = vn_phi_compute_hash (vp1);
1494 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1495 INSERT);
1497 /* Because we iterate over phi operations more than once, it's
1498 possible the slot might already exist here, hence no assert.*/
1499 *slot = vp1;
1500 return vp1;
1504 /* Print set of components in strongly connected component SCC to OUT. */
1506 static void
1507 print_scc (FILE *out, VEC (tree, heap) *scc)
1509 tree var;
1510 unsigned int i;
1512 fprintf (out, "SCC consists of: ");
1513 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1515 print_generic_expr (out, var, 0);
1516 fprintf (out, " ");
1518 fprintf (out, "\n");
1521 /* Set the value number of FROM to TO, return true if it has changed
1522 as a result. */
1524 static inline bool
1525 set_ssa_val_to (tree from, tree to)
1527 tree currval;
1529 if (from != to
1530 && TREE_CODE (to) == SSA_NAME
1531 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1532 to = from;
1534 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1535 and invariants. So assert that here. */
1536 gcc_assert (to != NULL_TREE
1537 && (to == VN_TOP
1538 || TREE_CODE (to) == SSA_NAME
1539 || is_gimple_min_invariant (to)));
1541 if (dump_file && (dump_flags & TDF_DETAILS))
1543 fprintf (dump_file, "Setting value number of ");
1544 print_generic_expr (dump_file, from, 0);
1545 fprintf (dump_file, " to ");
1546 print_generic_expr (dump_file, to, 0);
1549 currval = SSA_VAL (from);
1551 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1553 VN_INFO (from)->valnum = to;
1554 if (dump_file && (dump_flags & TDF_DETAILS))
1555 fprintf (dump_file, " (changed)\n");
1556 return true;
1558 if (dump_file && (dump_flags & TDF_DETAILS))
1559 fprintf (dump_file, "\n");
1560 return false;
1563 /* Set all definitions in STMT to value number to themselves.
1564 Return true if a value number changed. */
1566 static bool
1567 defs_to_varying (gimple stmt)
1569 bool changed = false;
1570 ssa_op_iter iter;
1571 def_operand_p defp;
1573 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1575 tree def = DEF_FROM_PTR (defp);
1577 VN_INFO (def)->use_processed = true;
1578 changed |= set_ssa_val_to (def, def);
1580 return changed;
1583 static bool expr_has_constants (tree expr);
1584 static tree valueize_expr (tree expr);
1586 /* Visit a copy between LHS and RHS, return true if the value number
1587 changed. */
1589 static bool
1590 visit_copy (tree lhs, tree rhs)
1592 /* Follow chains of copies to their destination. */
1593 while (TREE_CODE (rhs) == SSA_NAME
1594 && SSA_VAL (rhs) != rhs)
1595 rhs = SSA_VAL (rhs);
1597 /* The copy may have a more interesting constant filled expression
1598 (we don't, since we know our RHS is just an SSA name). */
1599 if (TREE_CODE (rhs) == SSA_NAME)
1601 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1602 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1605 return set_ssa_val_to (lhs, rhs);
1608 /* Visit a unary operator RHS, value number it, and return true if the
1609 value number of LHS has changed as a result. */
1611 static bool
1612 visit_unary_op (tree lhs, gimple stmt)
1614 bool changed = false;
1615 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1617 if (result)
1619 changed = set_ssa_val_to (lhs, result);
1621 else
1623 changed = set_ssa_val_to (lhs, lhs);
1624 vn_nary_op_insert_stmt (stmt, lhs);
1627 return changed;
1630 /* Visit a binary operator RHS, value number it, and return true if the
1631 value number of LHS has changed as a result. */
1633 static bool
1634 visit_binary_op (tree lhs, gimple stmt)
1636 bool changed = false;
1637 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1639 if (result)
1641 changed = set_ssa_val_to (lhs, result);
1643 else
1645 changed = set_ssa_val_to (lhs, lhs);
1646 vn_nary_op_insert_stmt (stmt, lhs);
1649 return changed;
1652 /* Visit a call STMT storing into LHS. Return true if the value number
1653 of the LHS has changed as a result. */
1655 static bool
1656 visit_reference_op_call (tree lhs, gimple stmt)
1658 bool changed = false;
1659 struct vn_reference_s vr1;
1660 tree result;
1661 tree vuse = gimple_vuse (stmt);
1663 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1664 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1665 vr1.hashcode = vn_reference_compute_hash (&vr1);
1666 result = vn_reference_lookup_1 (&vr1, NULL);
1667 if (result)
1669 changed = set_ssa_val_to (lhs, result);
1670 if (TREE_CODE (result) == SSA_NAME
1671 && VN_INFO (result)->has_constants)
1672 VN_INFO (lhs)->has_constants = true;
1674 else
1676 void **slot;
1677 vn_reference_t vr2;
1678 changed = set_ssa_val_to (lhs, lhs);
1679 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1680 vr2->vuse = vr1.vuse;
1681 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1682 vr2->hashcode = vr1.hashcode;
1683 vr2->result = lhs;
1684 slot = htab_find_slot_with_hash (current_info->references,
1685 vr2, vr2->hashcode, INSERT);
1686 if (*slot)
1687 free_reference (*slot);
1688 *slot = vr2;
1691 return changed;
1694 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1695 and return true if the value number of the LHS has changed as a result. */
1697 static bool
1698 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1700 bool changed = false;
/* Look up a previous load/store of the same location keyed on the vuse.  */
1701 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1703 /* We handle type-punning through unions by value-numbering based
1704 on offset and size of the access. Be prepared to handle a
1705 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1706 if (result
1707 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1709 /* We will be setting the value number of lhs to the value number
1710 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1711 So first simplify and lookup this expression to see if it
1712 is already available. */
1713 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
/* Try to combine the conversion with the expression of the converted
   SSA name (e.g. fold nested conversions).  */
1714 if ((CONVERT_EXPR_P (val)
1715 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1716 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1718 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1719 if ((CONVERT_EXPR_P (tem)
1720 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1721 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1722 TREE_TYPE (val), tem)))
1723 val = tem;
1725 result = val;
/* If VAL is still a non-trivial expression, see if an equal nary
   operation is already in the tables.  */
1726 if (!is_gimple_min_invariant (val)
1727 && TREE_CODE (val) != SSA_NAME)
1728 result = vn_nary_op_lookup (val, NULL);
1729 /* If the expression is not yet available, value-number lhs to
1730 a new SSA_NAME we create. */
/* NOTE(review): may_insert appears to be a file-scope flag gating
   insertion of new SSA names (used by PRE) -- confirm where it is set.  */
1731 if (!result && may_insert)
1733 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1734 /* Initialize value-number information properly. */
1735 VN_INFO_GET (result)->valnum = result;
1736 VN_INFO (result)->value_id = get_next_value_id ();
1737 VN_INFO (result)->expr = val;
1738 VN_INFO (result)->has_constants = expr_has_constants (val);
1739 VN_INFO (result)->needs_insertion = true;
1740 /* As all "inserted" statements are singleton SCCs, insert
1741 to the valid table. This is strictly needed to
1742 avoid re-generating new value SSA_NAMEs for the same
1743 expression during SCC iteration over and over (the
1744 optimistic table gets cleared after each iteration).
1745 We do not need to insert into the optimistic table, as
1746 lookups there will fall back to the valid table. */
1747 if (current_info == optimistic_info)
1749 current_info = valid_info;
1750 vn_nary_op_insert (val, result);
1751 current_info = optimistic_info;
1753 else
1754 vn_nary_op_insert (val, result);
1755 if (dump_file && (dump_flags & TDF_DETAILS))
1757 fprintf (dump_file, "Inserting name ");
1758 print_generic_expr (dump_file, result, 0);
1759 fprintf (dump_file, " for expression ");
1760 print_generic_expr (dump_file, val, 0);
1761 fprintf (dump_file, "\n");
/* A previous (possibly converted) value was found: LHS gets its value
   number, and inherits constant/expression info from an SSA result.  */
1766 if (result)
1768 changed = set_ssa_val_to (lhs, result);
1769 if (TREE_CODE (result) == SSA_NAME
1770 && VN_INFO (result)->has_constants)
1772 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1773 VN_INFO (lhs)->has_constants = true;
1776 else
/* Nothing known: LHS is its own value; record the load for reuse.  */
1778 changed = set_ssa_val_to (lhs, lhs);
1779 vn_reference_insert (op, lhs, gimple_vuse (stmt));
1782 return changed;
1786 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1787 and return true if the value number of the LHS has changed as a result. */
1789 static bool
1790 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1792 bool changed = false;
1793 tree result;
1794 bool resultsame = false;
1796 /* First we want to lookup using the *vuses* from the store and see
1797 if there the last store to this location with the same address
1798 had the same value.
1800 The vuses represent the memory state before the store. If the
1801 memory state, address, and value of the store is the same as the
1802 last store to this location, then this store will produce the
1803 same memory state as that store.
1805 In this case the vdef versions for this store are value numbered to those
1806 vuse versions, since they represent the same memory state after
1807 this store.
1809 Otherwise, the vdefs for the store are used when inserting into
1810 the table, since the store generates a new memory state. */
1812 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
1814 if (result)
/* Compare the previously stored value and the value being stored,
   both canonicalized to their value numbers.  */
1816 if (TREE_CODE (result) == SSA_NAME)
1817 result = SSA_VAL (result);
1818 if (TREE_CODE (op) == SSA_NAME)
1819 op = SSA_VAL (op);
1820 resultsame = expressions_equal_p (result, op);
1823 if (!result || !resultsame)
1825 tree vdef;
1827 if (dump_file && (dump_flags & TDF_DETAILS))
1829 fprintf (dump_file, "No store match\n");
1830 fprintf (dump_file, "Value numbering store ");
1831 print_generic_expr (dump_file, lhs, 0);
1832 fprintf (dump_file, " to ");
1833 print_generic_expr (dump_file, op, 0);
1834 fprintf (dump_file, "\n");
1836 /* Have to set value numbers before insert, since insert is
1837 going to valueize the references in-place. */
/* The store creates a new memory state: the vdef is its own value.
   NOTE(review): the vdef may be absent for some stores, hence the
   conditional.  */
1838 if ((vdef = gimple_vdef (stmt)))
1840 VN_INFO (vdef)->use_processed = true;
1841 changed |= set_ssa_val_to (vdef, vdef);
1844 /* Do not insert structure copies into the tables. */
1845 if (is_gimple_min_invariant (op)
1846 || is_gimple_reg (op))
1847 vn_reference_insert (lhs, op, vdef);
1849 else
1851 /* We had a match, so value number the vdef to have the value
1852 number of the vuse it came from. */
1853 tree def, use;
1855 if (dump_file && (dump_flags & TDF_DETAILS))
1856 fprintf (dump_file, "Store matched earlier value,"
1857 "value numbering store vdefs to matching vuses.\n");
1859 def = gimple_vdef (stmt);
1860 use = gimple_vuse (stmt);
1862 VN_INFO (def)->use_processed = true;
1863 changed |= set_ssa_val_to (def, SSA_VAL (use));
1866 return changed;
1869 /* Visit and value number PHI, return true if the value number
1870 changed. */
1872 static bool
1873 visit_phi (gimple phi)
1875 bool changed = false;
1876 tree result;
1877 tree sameval = VN_TOP;
1878 bool allsame = true;
1879 unsigned i;
1881 /* TODO: We could check for this in init_sccvn, and replace this
1882 with a gcc_assert. */
/* Abnormal PHIs must not be value numbered to anything else.  */
1883 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1884 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1886 /* See if all non-TOP arguments have the same value. TOP is
1887 equivalent to everything, so we can ignore it. */
1888 for (i = 0; i < gimple_phi_num_args (phi); i++)
1890 tree def = PHI_ARG_DEF (phi, i);
1892 if (TREE_CODE (def) == SSA_NAME)
1893 def = SSA_VAL (def);
1894 if (def == VN_TOP)
1895 continue;
1896 if (sameval == VN_TOP)
1898 sameval = def;
1900 else
1902 if (!expressions_equal_p (def, sameval))
1904 allsame = false;
1905 break;
1910 /* If all value numbered to the same value, the phi node has that
1911 value. */
1912 if (allsame)
/* Record whether the common value is a constant for later users.  */
1914 if (is_gimple_min_invariant (sameval))
1916 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1917 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1919 else
1921 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1922 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* An SSA name needs copy handling so expression info propagates.  */
1925 if (TREE_CODE (sameval) == SSA_NAME)
1926 return visit_copy (PHI_RESULT (phi), sameval);
1928 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1931 /* Otherwise, see if it is equivalent to a phi node in this block. */
1932 result = vn_phi_lookup (phi);
1933 if (result)
1935 if (TREE_CODE (result) == SSA_NAME)
1936 changed = visit_copy (PHI_RESULT (phi), result);
1937 else
1938 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1940 else
/* No equivalent PHI known: the result is its own value number.  */
1942 vn_phi_insert (phi, PHI_RESULT (phi));
1943 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1944 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1945 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1948 return changed;
1951 /* Return true if EXPR contains constants. */
1953 static bool
1954 expr_has_constants (tree expr)
1956 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1958 case tcc_unary:
1959 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1961 case tcc_binary:
1962 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1963 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1964 /* Constants inside reference ops are rarely interesting, but
1965 it can take a lot of looking to find them. */
1966 case tcc_reference:
1967 case tcc_declaration:
1968 return false;
1969 default:
1970 return is_gimple_min_invariant (expr);
1972 return false;
1975 /* Return true if STMT contains constants. */
1977 static bool
1978 stmt_has_constants (gimple stmt)
1980 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1981 return false;
1983 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1985 case GIMPLE_UNARY_RHS:
1986 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1988 case GIMPLE_BINARY_RHS:
1989 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
1990 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
1991 case GIMPLE_SINGLE_RHS:
1992 /* Constants inside reference ops are rarely interesting, but
1993 it can take a lot of looking to find them. */
1994 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1995 default:
1996 gcc_unreachable ();
1998 return false;
2001 /* Replace SSA_NAMES in expr with their value numbers, and return the
2002 result.
2003 This is performed in place. */
2005 static tree
2006 valueize_expr (tree expr)
2008 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2010 case tcc_unary:
2011 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2012 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2013 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2014 break;
2015 case tcc_binary:
2016 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2017 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2018 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2019 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2020 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2021 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2022 break;
2023 default:
2024 break;
2026 return expr;
2029 /* Simplify the binary expression RHS, and return the result if
2030 simplified. */
2032 static tree
2033 simplify_binary_expression (gimple stmt)
2035 tree result = NULL_TREE;
2036 tree op0 = gimple_assign_rhs1 (stmt);
2037 tree op1 = gimple_assign_rhs2 (stmt);
2039 /* This will not catch every single case we could combine, but will
2040 catch those with constants. The goal here is to simultaneously
2041 combine constants between expressions, but avoid infinite
2042 expansion of expressions during simplification. */
2043 if (TREE_CODE (op0) == SSA_NAME)
2045 if (VN_INFO (op0)->has_constants
2046 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2047 op0 = valueize_expr (vn_get_expr_for (op0));
2048 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2049 op0 = SSA_VAL (op0);
2052 if (TREE_CODE (op1) == SSA_NAME)
2054 if (VN_INFO (op1)->has_constants)
2055 op1 = valueize_expr (vn_get_expr_for (op1));
2056 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2057 op1 = SSA_VAL (op1);
2060 /* Avoid folding if nothing changed. */
2061 if (op0 == gimple_assign_rhs1 (stmt)
2062 && op1 == gimple_assign_rhs2 (stmt))
2063 return NULL_TREE;
2065 fold_defer_overflow_warnings ();
2067 result = fold_binary (gimple_assign_rhs_code (stmt),
2068 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2069 if (result)
2070 STRIP_USELESS_TYPE_CONVERSION (result);
2072 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2073 stmt, 0);
2075 /* Make sure result is not a complex expression consisting
2076 of operators of operators (IE (a + b) + (a + c))
2077 Otherwise, we will end up with unbounded expressions if
2078 fold does anything at all. */
2079 if (result && valid_gimple_rhs_p (result))
2080 return result;
2082 return NULL_TREE;
2085 /* Simplify the unary expression RHS, and return the result if
2086 simplified. */
2088 static tree
2089 simplify_unary_expression (gimple stmt)
2091 tree result = NULL_TREE;
2092 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2094 /* We handle some tcc_reference codes here that are all
2095 GIMPLE_ASSIGN_SINGLE codes. */
2096 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2097 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2098 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2099 op0 = TREE_OPERAND (op0, 0);
2101 if (TREE_CODE (op0) != SSA_NAME)
2102 return NULL_TREE;
2104 orig_op0 = op0;
2105 if (VN_INFO (op0)->has_constants)
2106 op0 = valueize_expr (vn_get_expr_for (op0));
2107 else if (gimple_assign_cast_p (stmt)
2108 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2109 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2110 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2112 /* We want to do tree-combining on conversion-like expressions.
2113 Make sure we feed only SSA_NAMEs or constants to fold though. */
2114 tree tem = valueize_expr (vn_get_expr_for (op0));
2115 if (UNARY_CLASS_P (tem)
2116 || BINARY_CLASS_P (tem)
2117 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2118 || TREE_CODE (tem) == SSA_NAME
2119 || is_gimple_min_invariant (tem))
2120 op0 = tem;
2123 /* Avoid folding if nothing changed, but remember the expression. */
2124 if (op0 == orig_op0)
2125 return NULL_TREE;
2127 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2128 gimple_expr_type (stmt), op0);
2129 if (result)
2131 STRIP_USELESS_TYPE_CONVERSION (result);
2132 if (valid_gimple_rhs_p (result))
2133 return result;
2136 return NULL_TREE;
2139 /* Try to simplify RHS using equivalences and constant folding. */
2141 static tree
2142 try_to_simplify (gimple stmt)
2144 tree tem;
2146 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2147 in this case, there is no point in doing extra work. */
2148 if (gimple_assign_copy_p (stmt)
2149 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2150 return NULL_TREE;
2152 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
/* A declaration on the rhs may be a known-constant symbol.  */
2154 case tcc_declaration:
2155 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2156 if (tem)
2157 return tem;
2158 break;
2160 case tcc_reference:
2161 /* Do not do full-blown reference lookup here, but simplify
2162 reads from constant aggregates. */
2163 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2164 if (tem)
2165 return tem;
2167 /* Fallthrough for some codes that can operate on registers. */
/* Only REALPART/IMAGPART/VIEW_CONVERT fall through into the
   tcc_unary case below; all other references stop here.  */
2168 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2169 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2170 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2171 break;
2172 /* We could do a little more with unary ops, if they expand
2173 into binary ops, but it's debatable whether it is worth it. */
2174 case tcc_unary:
2175 return simplify_unary_expression (stmt);
2176 break;
2177 case tcc_comparison:
2178 case tcc_binary:
2179 return simplify_binary_expression (stmt);
2180 break;
2181 default:
2182 break;
2185 return NULL_TREE;
2188 /* Visit and value number USE, return true if the value number
2189 changed. */
2191 static bool
2192 visit_use (tree use)
2194 bool changed = false;
2195 gimple stmt = SSA_NAME_DEF_STMT (use);
2197 VN_INFO (use)->use_processed = true;
2199 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2200 if (dump_file && (dump_flags & TDF_DETAILS)
2201 && !SSA_NAME_IS_DEFAULT_DEF (use))
2203 fprintf (dump_file, "Value numbering ");
2204 print_generic_expr (dump_file, use, 0);
2205 fprintf (dump_file, " stmt = ");
2206 print_gimple_stmt (dump_file, stmt, 0, 0);
2209 /* Handle uninitialized uses.  A default definition has no defining
   statement, so it can only be value numbered to itself.  */
2210 if (SSA_NAME_IS_DEFAULT_DEF (use))
2211 changed = set_ssa_val_to (use, use)
2212 else
/* Dispatch on the kind of the defining statement: PHI nodes get
   congruence-based handling; statements without an lhs, with volatile
   operands, or that may throw cannot be value numbered and make their
   defs VARYING.  */
2214 if (gimple_code (stmt) == GIMPLE_PHI)
2215 changed = visit_phi (stmt);
2216 else if (!gimple_has_lhs (stmt)
2217 || gimple_has_volatile_ops (stmt)
2218 || stmt_could_throw_p (stmt))
2219 changed = defs_to_varying (stmt);
2220 else if (is_gimple_assign (stmt))
2222 tree lhs = gimple_assign_lhs (stmt);
2223 tree simplified;
2225 /* Shortcut for copies. Simplifying copies is pointless,
2226 since we copy the expression and value they represent. */
2227 if (gimple_assign_copy_p (stmt)
2228 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2229 && TREE_CODE (lhs) == SSA_NAME)
2231 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2232 goto done;
2234 simplified = try_to_simplify (stmt);
2235 if (simplified)
2237 if (dump_file && (dump_flags & TDF_DETAILS))
2239 fprintf (dump_file, "RHS ");
2240 print_gimple_expr (dump_file, stmt, 0, 0);
2241 fprintf (dump_file, " simplified to ");
2242 print_generic_expr (dump_file, simplified, 0);
2243 if (TREE_CODE (lhs) == SSA_NAME)
2244 fprintf (dump_file, " has constants %d\n",
2245 expr_has_constants (simplified));
2246 else
2247 fprintf (dump_file, "\n");
2250 /* Setting value numbers to constants will occasionally
2251 screw up phi congruence because constants are not
2252 uniquely associated with a single ssa name that can be
2253 looked up. */
2254 if (simplified
2255 && is_gimple_min_invariant (simplified)
2256 && TREE_CODE (lhs) == SSA_NAME)
2258 VN_INFO (lhs)->expr = simplified;
2259 VN_INFO (lhs)->has_constants = true;
2260 changed = set_ssa_val_to (lhs, simplified);
2261 goto done;
2263 else if (simplified
2264 && TREE_CODE (simplified) == SSA_NAME
2265 && TREE_CODE (lhs) == SSA_NAME)
2267 changed = visit_copy (lhs, simplified);
2268 goto done;
2270 else if (simplified)
2272 if (TREE_CODE (lhs) == SSA_NAME)
2274 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2275 /* We have to unshare the expression or else
2276 valuizing may change the IL stream. */
2277 VN_INFO (lhs)->expr = unshare_expr (simplified);
2280 else if (stmt_has_constants (stmt)
2281 && TREE_CODE (lhs) == SSA_NAME)
2282 VN_INFO (lhs)->has_constants = true;
2283 else if (TREE_CODE (lhs) == SSA_NAME)
2285 /* We reset expr and constantness here because we may
2286 have been value numbering optimistically, and
2287 iterating. They may become non-constant in this case,
2288 even if they were optimistically constant. */
2290 VN_INFO (lhs)->has_constants = false;
2291 VN_INFO (lhs)->expr = NULL_TREE;
2294 if ((TREE_CODE (lhs) == SSA_NAME
2295 /* We can substitute SSA_NAMEs that are live over
2296 abnormal edges with their constant value. */
2297 && !(gimple_assign_copy_p (stmt)
2298 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2299 && !(simplified
2300 && is_gimple_min_invariant (simplified))
2301 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2302 /* Stores or copies from SSA_NAMEs that are live over
2303 abnormal edges are a problem. */
2304 || (gimple_assign_single_p (stmt)
2305 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2307 changed = defs_to_varying (stmt);
2308 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2310 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2312 else if (TREE_CODE (lhs) == SSA_NAME)
2314 if ((gimple_assign_copy_p (stmt)
2315 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2316 || (simplified
2317 && is_gimple_min_invariant (simplified)))
2319 VN_INFO (lhs)->has_constants = true;
2320 if (simplified)
2321 changed = set_ssa_val_to (lhs, simplified);
2322 else
2323 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2325 else
/* No simplification happened; value number by the shape of the
   right-hand side.  */
2327 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2329 case GIMPLE_UNARY_RHS:
2330 changed = visit_unary_op (lhs, stmt);
2331 break;
2332 case GIMPLE_BINARY_RHS:
2333 changed = visit_binary_op (lhs, stmt);
2334 break;
2335 case GIMPLE_SINGLE_RHS:
2336 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2338 case tcc_reference:
2339 /* VOP-less references can go through unary case. */
2340 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2341 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2342 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2343 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2345 changed = visit_unary_op (lhs, stmt);
2346 break;
2348 /* Fallthrough. */
2349 case tcc_declaration:
2350 changed = visit_reference_op_load
2351 (lhs, gimple_assign_rhs1 (stmt), stmt);
2352 break;
2353 case tcc_expression:
2354 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2356 changed = visit_unary_op (lhs, stmt);
2357 break;
2359 /* Fallthrough. */
2360 default:
2361 changed = defs_to_varying (stmt);
2363 break;
2364 default:
2365 changed = defs_to_varying (stmt);
2366 break;
2370 else
2371 changed = defs_to_varying (stmt);
/* Calls: only pure/const calls with an SSA lhs are value numbered;
   everything else becomes VARYING.  */
2373 else if (is_gimple_call (stmt))
2375 tree lhs = gimple_call_lhs (stmt);
2377 /* ??? We could try to simplify calls. */
2379 if (stmt_has_constants (stmt)
2380 && TREE_CODE (lhs) == SSA_NAME)
2381 VN_INFO (lhs)->has_constants = true;
2382 else if (TREE_CODE (lhs) == SSA_NAME)
2384 /* We reset expr and constantness here because we may
2385 have been value numbering optimistically, and
2386 iterating. They may become non-constant in this case,
2387 even if they were optimistically constant. */
2388 VN_INFO (lhs)->has_constants = false;
2389 VN_INFO (lhs)->expr = NULL_TREE;
2392 if (TREE_CODE (lhs) == SSA_NAME
2393 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2394 changed = defs_to_varying (stmt);
2395 /* ??? We should handle stores from calls. */
2396 else if (TREE_CODE (lhs) == SSA_NAME)
2398 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2399 changed = visit_reference_op_call (lhs, stmt);
2400 else
2401 changed = defs_to_varying (stmt);
2403 else
2404 changed = defs_to_varying (stmt);
2407 done:
2408 return changed;
2411 /* Compare two operands by reverse postorder index */
2413 static int
2414 compare_ops (const void *pa, const void *pb)
2416 const tree opa = *((const tree *)pa);
2417 const tree opb = *((const tree *)pb);
2418 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2419 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2420 basic_block bba;
2421 basic_block bbb;
2423 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2424 return 0;
2425 else if (gimple_nop_p (opstmta))
2426 return -1;
2427 else if (gimple_nop_p (opstmtb))
2428 return 1;
2430 bba = gimple_bb (opstmta);
2431 bbb = gimple_bb (opstmtb);
2433 if (!bba && !bbb)
2434 return 0;
2435 else if (!bba)
2436 return -1;
2437 else if (!bbb)
2438 return 1;
2440 if (bba == bbb)
2442 if (gimple_code (opstmta) == GIMPLE_PHI
2443 && gimple_code (opstmtb) == GIMPLE_PHI)
2444 return 0;
2445 else if (gimple_code (opstmta) == GIMPLE_PHI)
2446 return -1;
2447 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2448 return 1;
2449 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2451 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2454 /* Sort an array containing members of a strongly connected component
2455 SCC so that the members are ordered by RPO number.
2456 This means that when the sort is complete, iterating through the
2457 array will give you the members in RPO order. */
2459 static void
2460 sort_scc (VEC (tree, heap) *scc)
2462 qsort (VEC_address (tree, scc),
2463 VEC_length (tree, scc),
2464 sizeof (tree),
2465 compare_ops);
2468 /* Process a strongly connected component in the SSA graph. */
2470 static void
2471 process_scc (VEC (tree, heap) *scc)
2473 /* If the SCC has a single member, just visit it. */
2475 if (VEC_length (tree, scc) == 1)
2477 tree use = VEC_index (tree, scc, 0);
2478 if (!VN_INFO (use)->use_processed)
2479 visit_use (use);
2481 else
2483 tree var;
2484 unsigned int i;
2485 unsigned int iterations = 0;
2486 bool changed = true;
2488 /* Iterate over the SCC with the optimistic table until it stops
2489 changing. */
2490 current_info = optimistic_info;
2491 while (changed)
2493 changed = false;
2494 iterations++;
2495 /* As we are value-numbering optimistically we have to
2496 clear the expression tables and the simplified expressions
2497 in each iteration until we converge. */
2498 htab_empty (optimistic_info->nary);
2499 htab_empty (optimistic_info->phis);
2500 htab_empty (optimistic_info->references);
2501 obstack_free (&optimistic_info->nary_obstack, NULL);
2502 gcc_obstack_init (&optimistic_info->nary_obstack);
2503 empty_alloc_pool (optimistic_info->phis_pool);
2504 empty_alloc_pool (optimistic_info->references_pool);
/* Clear all cached expressions before re-visiting; a stale expr from
   the previous iteration could otherwise survive into this one.  */
2505 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2506 VN_INFO (var)->expr = NULL_TREE;
2507 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2508 changed |= visit_use (var);
2511 statistics_histogram_event (cfun, "SCC iterations", iterations);
2513 /* Finally, visit the SCC once using the valid table. */
2514 current_info = valid_info;
2515 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2516 visit_use (var);
/* Instantiate a heap-allocated vector of ssa_op_iter; used by DFS
   below as an explicit stack of saved use-iterator states.  */
2520 DEF_VEC_O(ssa_op_iter);
2521 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2523 /* Pop the components of the found SCC for NAME off the SCC stack
2524 and process them. Returns true if all went well, false if
2525 we run into resource limits. */
2527 static bool
2528 extract_and_process_scc_for_name (tree name)
2530 VEC (tree, heap) *scc = NULL;
2531 tree x;
2533 /* Found an SCC, pop the components off the SCC stack and
2534 process them. */
2537 x = VEC_pop (tree, sccstack);
2539 VN_INFO (x)->on_sccstack = false;
2540 VEC_safe_push (tree, heap, scc, x);
2541 } while (x != name);
2543 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2544 if (VEC_length (tree, scc)
2545 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2547 if (dump_file)
2548 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2549 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2550 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2551 return false;
2554 if (VEC_length (tree, scc) > 1)
2555 sort_scc (scc);
2557 if (dump_file && (dump_flags & TDF_DETAILS))
2558 print_scc (dump_file, scc);
2560 process_scc (scc);
2562 VEC_free (tree, heap, scc);
2564 return true;
2567 /* Depth first search on NAME to discover and process SCC's in the SSA
2568 graph.
2569 Execution of this algorithm relies on the fact that the SCC's are
2570 popped off the stack in topological order.
2571 Returns true if successful, false if we stopped processing SCC's due
2572 to resource constraints. */
2574 static bool
2575 DFS (tree name)
/* ITERVEC/NAMEVEC form an explicit recursion stack: instead of calling
   DFS recursively on an unvisited use, the current (name, iterator)
   pair is pushed and control jumps back to start_over.  */
2577 VEC(ssa_op_iter, heap) *itervec = NULL;
2578 VEC(tree, heap) *namevec = NULL;
2579 use_operand_p usep = NULL;
2580 gimple defstmt;
2581 tree use;
2582 ssa_op_iter iter;
2584 start_over:
2585 /* SCC info */
2586 VN_INFO (name)->dfsnum = next_dfs_num++;
2587 VN_INFO (name)->visited = true;
2588 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2590 VEC_safe_push (tree, heap, sccstack, name);
2591 VN_INFO (name)->on_sccstack = true;
2592 defstmt = SSA_NAME_DEF_STMT (name);
2594 /* Recursively DFS on our operands, looking for SCC's. */
2595 if (!gimple_nop_p (defstmt))
2597 /* Push a new iterator. */
2598 if (gimple_code (defstmt) == GIMPLE_PHI)
2599 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2600 else
2601 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2603 else
2604 clear_and_done_ssa_iter (&iter);
2606 while (1)
2608 /* If we are done processing uses of a name, go up the stack
2609 of iterators and process SCCs as we found them. */
2610 if (op_iter_done (&iter))
2612 /* See if we found an SCC. */
2613 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2614 if (!extract_and_process_scc_for_name (name))
2616 VEC_free (tree, heap, namevec);
2617 VEC_free (ssa_op_iter, heap, itervec);
2618 return false;
2621 /* Check if we are done. */
2622 if (VEC_empty (tree, namevec))
2624 VEC_free (tree, heap, namevec);
2625 VEC_free (ssa_op_iter, heap, itervec);
2626 return true;
2629 /* Restore the last use walker and continue walking there. */
2630 use = name;
2631 name = VEC_pop (tree, namevec);
2632 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2633 sizeof (ssa_op_iter));
2634 VEC_pop (ssa_op_iter, itervec);
2635 goto continue_walking;
2638 use = USE_FROM_PTR (usep);
2640 /* Since we handle phi nodes, we will sometimes get
2641 invariants in the use expression. */
2642 if (TREE_CODE (use) == SSA_NAME)
2644 if (! (VN_INFO (use)->visited))
2646 /* Recurse by pushing the current use walking state on
2647 the stack and starting over. */
2648 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2649 VEC_safe_push(tree, heap, namevec, name);
2650 name = use;
2651 goto start_over;
2653 continue_walking:
/* Standard Tarjan low-link update after returning from (or skipping)
   the visit of USE.  */
2654 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2655 VN_INFO (use)->low);
2657 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2658 && VN_INFO (use)->on_sccstack)
2660 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2661 VN_INFO (name)->low);
2665 usep = op_iter_next_use (&iter);
2669 /* Allocate a value number table. */
2671 static void
2672 allocate_vn_table (vn_tables_t table)
2674 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2675 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2676 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2677 free_reference);
2679 gcc_obstack_init (&table->nary_obstack);
2680 table->phis_pool = create_alloc_pool ("VN phis",
2681 sizeof (struct vn_phi_s),
2682 30);
2683 table->references_pool = create_alloc_pool ("VN references",
2684 sizeof (struct vn_reference_s),
2685 30);
2688 /* Free a value number table. */
2690 static void
2691 free_vn_table (vn_tables_t table)
2693 htab_delete (table->phis);
2694 htab_delete (table->nary);
2695 htab_delete (table->references);
2696 obstack_free (&table->nary_obstack, NULL);
2697 free_alloc_pool (table->phis_pool);
2698 free_alloc_pool (table->references_pool);
/* Initialize the global state used by SCC value numbering: the SCC
   stack, constant tables, per-SSA-name VN_INFO records, block RPO
   numbers, and the valid/optimistic hash tables.  */
2701 static void
2702 init_scc_vn (void)
2704 size_t i;
2705 int j;
2706 int *rpo_numbers_temp;
2708 calculate_dominance_info (CDI_DOMINATORS);
2709 sccstack = NULL;
2710 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2711 free);
2713 constant_value_ids = BITMAP_ALLOC (NULL);
2715 next_dfs_num = 1;
2716 next_value_id = 1;
2718 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2719 /* VEC_alloc doesn't actually grow it to the right size, it just
2720 preallocates the space to do so. */
2721 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2722 gcc_obstack_init (&vn_ssa_aux_obstack);
2724 shared_lookup_phiargs = NULL;
2725 shared_lookup_references = NULL;
2726 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2727 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2728 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2730 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2731 the i'th block in RPO order is bb. We want to map bb's to RPO
2732 numbers, so we need to rearrange this array. */
2733 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2734 rpo_numbers[rpo_numbers_temp[j]] = j;
2736 XDELETE (rpo_numbers_temp);
2738 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2740 /* Create the VN_INFO structures, and initialize value numbers to
2741 TOP. */
2742 for (i = 0; i < num_ssa_names; i++)
2744 tree name = ssa_name (i);
2745 if (name)
2747 VN_INFO_GET (name)->valnum = VN_TOP;
2748 VN_INFO (name)->expr = NULL_TREE;
2749 VN_INFO (name)->value_id = 0;
/* Fresh statement UIDs; compare_ops orders statements within one
   block by gimple_uid.  */
2753 renumber_gimple_stmt_uids ();
2755 /* Create the valid and optimistic value numbering tables. */
2756 valid_info = XCNEW (struct vn_tables_s);
2757 allocate_vn_table (valid_info);
2758 optimistic_info = XCNEW (struct vn_tables_s);
2759 allocate_vn_table (optimistic_info);
/* Free all memory allocated by init_scc_vn and the value numbering
   run, and release SSA names that were created but never inserted.  */
2762 void
2763 free_scc_vn (void)
2765 size_t i;
2767 htab_delete (constant_to_value_id);
2768 BITMAP_FREE (constant_value_ids);
2769 VEC_free (tree, heap, shared_lookup_phiargs);
2770 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2771 XDELETEVEC (rpo_numbers);
/* This loop reads VN_INFO, so it must run before the aux obstack
   holding the VN_INFO records is freed below.  */
2773 for (i = 0; i < num_ssa_names; i++)
2775 tree name = ssa_name (i);
2776 if (name
2777 && VN_INFO (name)->needs_insertion)
2778 release_ssa_name (name);
2780 obstack_free (&vn_ssa_aux_obstack, NULL);
2781 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2783 VEC_free (tree, heap, sccstack);
2784 free_vn_table (valid_info);
2785 XDELETE (valid_info);
2786 free_vn_table (optimistic_info);
2787 XDELETE (optimistic_info);
2790 /* Set the value ids in the valid hash tables. */
2792 static void
2793 set_hashtable_value_ids (void)
2795 htab_iterator hi;
2796 vn_nary_op_t vno;
2797 vn_reference_t vr;
2798 vn_phi_t vp;
2800 /* Now set the value ids of the things we had put in the hash
2801 table. */
/* For each table (nary, phi, reference) an entry's value id comes
   from its result: the VN_INFO id for an SSA name result, or a
   constant value id for an invariant result.  */
2803 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2804 vno, vn_nary_op_t, hi)
2806 if (vno->result)
2808 if (TREE_CODE (vno->result) == SSA_NAME)
2809 vno->value_id = VN_INFO (vno->result)->value_id;
2810 else if (is_gimple_min_invariant (vno->result))
2811 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2815 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2816 vp, vn_phi_t, hi)
2818 if (vp->result)
2820 if (TREE_CODE (vp->result) == SSA_NAME)
2821 vp->value_id = VN_INFO (vp->result)->value_id;
2822 else if (is_gimple_min_invariant (vp->result))
2823 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2827 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2828 vr, vn_reference_t, hi)
2830 if (vr->result)
2832 if (TREE_CODE (vr->result) == SSA_NAME)
2833 vr->value_id = VN_INFO (vr->result)->value_id;
2834 else if (is_gimple_min_invariant (vr->result))
2835 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2840 /* Do SCCVN. Returns true if it finished, false if we bailed out
2841 due to resource constraints. */
2843 bool
2844 run_scc_vn (bool may_insert_arg)
2846 size_t i;
2847 tree param;
2848 bool changed = true;
2850 may_insert = may_insert_arg;
2852 init_scc_vn ();
2853 current_info = valid_info;
/* Parameter default definitions are value numbered to themselves.  */
2855 for (param = DECL_ARGUMENTS (current_function_decl);
2856 param;
2857 param = TREE_CHAIN (param))
2859 if (gimple_default_def (cfun, param) != NULL)
2861 tree def = gimple_default_def (cfun, param);
2862 VN_INFO (def)->valnum = def;
/* Run the SCC finder/visitor over every SSA name with uses; a false
   return from DFS means we hit the SCC size limit and give up.  */
2866 for (i = 1; i < num_ssa_names; ++i)
2868 tree name = ssa_name (i);
2869 if (name
2870 && VN_INFO (name)->visited == false
2871 && !has_zero_uses (name))
2872 if (!DFS (name))
2874 free_scc_vn ();
2875 may_insert = false;
2876 return false;
2880 /* Initialize the value ids. */
2882 for (i = 1; i < num_ssa_names; ++i)
2884 tree name = ssa_name (i);
2885 vn_ssa_aux_t info;
2886 if (!name)
2887 continue;
2888 info = VN_INFO (name);
2889 if (info->valnum == name
2890 || info->valnum == VN_TOP)
2891 info->value_id = get_next_value_id ();
2892 else if (is_gimple_min_invariant (info->valnum))
2893 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2896 /* Propagate until they stop changing. */
2897 while (changed)
2899 changed = false;
2900 for (i = 1; i < num_ssa_names; ++i)
2902 tree name = ssa_name (i);
2903 vn_ssa_aux_t info;
2904 if (!name)
2905 continue;
2906 info = VN_INFO (name);
2907 if (TREE_CODE (info->valnum) == SSA_NAME
2908 && info->valnum != name
2909 && info->value_id != VN_INFO (info->valnum)->value_id)
2911 changed = true;
2912 info->value_id = VN_INFO (info->valnum)->value_id;
2917 set_hashtable_value_ids ();
2919 if (dump_file && (dump_flags & TDF_DETAILS))
2921 fprintf (dump_file, "Value numbers:\n");
2922 for (i = 0; i < num_ssa_names; i++)
2924 tree name = ssa_name (i);
2925 if (name
2926 && VN_INFO (name)->visited
2927 && SSA_VAL (name) != name)
2929 print_generic_expr (dump_file, name, 0);
2930 fprintf (dump_file, " = ");
2931 print_generic_expr (dump_file, SSA_VAL (name), 0);
2932 fprintf (dump_file, "\n");
2937 may_insert = false;
2938 return true;
2941 /* Return the maximum value id we have ever seen. */
2943 unsigned int
2944 get_max_value_id (void)
2946 return next_value_id;
2949 /* Return the next unique value id. */
2951 unsigned int
2952 get_next_value_id (void)
2954 return next_value_id++;
2958 /* Compare two expressions E1 and E2 and return true if they are equal. */
2960 bool
2961 expressions_equal_p (tree e1, tree e2)
2963 /* The obvious case. */
2964 if (e1 == e2)
2965 return true;
2967 /* If only one of them is null, they cannot be equal. */
2968 if (!e1 || !e2)
2969 return false;
2971 /* Recurse on elements of lists. */
2972 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2974 tree lop1 = e1;
2975 tree lop2 = e2;
2976 for (lop1 = e1, lop2 = e2;
2977 lop1 || lop2;
2978 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2980 if (!lop1 || !lop2)
2981 return false;
2982 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2983 return false;
2985 return true;
2988 /* Now perform the actual comparison. */
2989 if (TREE_CODE (e1) == TREE_CODE (e2)
2990 && operand_equal_p (e1, e2, OEP_PURE_SAME))
2991 return true;
2993 return false;
2997 /* Return true if the nary operation NARY may trap. This is a copy
2998 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3000 bool
3001 vn_nary_may_trap (vn_nary_op_t nary)
3003 tree type;
3004 tree rhs2;
3005 bool honor_nans = false;
3006 bool honor_snans = false;
3007 bool fp_operation = false;
3008 bool honor_trapv = false;
3009 bool handled, ret;
3010 unsigned i;
3012 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3013 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3014 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3016 type = nary->type;
3017 fp_operation = FLOAT_TYPE_P (type);
3018 if (fp_operation)
3020 honor_nans = flag_trapping_math && !flag_finite_math_only;
3021 honor_snans = flag_signaling_nans != 0;
3023 else if (INTEGRAL_TYPE_P (type)
3024 && TYPE_OVERFLOW_TRAPS (type))
3025 honor_trapv = true;
3027 rhs2 = nary->op[1];
3028 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3029 honor_trapv,
3030 honor_nans, honor_snans, rhs2,
3031 &handled);
3032 if (handled
3033 && ret)
3034 return true;
3036 for (i = 0; i < nary->length; ++i)
3037 if (tree_could_trap_p (nary->op[i]))
3038 return true;
3040 return false;