/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
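
/* As an illustration of alternative 2 above, here is a minimal,
   self-contained sketch (not part of GCC) of the Tarjan-style DFS
   that the SCC walk is built on.  The graph representation (NODES,
   succs[]) is hypothetical; in this file the roles of dfs_num, low,
   the stack and the visited flags are played by the vn_ssa_aux
   fields and sccstack below.  */
#if 0
#include <stdio.h>

#define NODES 4
static int succs[NODES][NODES];   /* adjacency matrix of the SSA graph */
static int dfs_num[NODES], low[NODES], on_stack[NODES];
static int stack[NODES], sp, next_dfs;

static void
dfs (int n)
{
  int i;
  dfs_num[n] = low[n] = ++next_dfs;
  stack[sp++] = n;
  on_stack[n] = 1;
  for (i = 0; i < NODES; i++)
    if (succs[n][i])
      {
        if (!dfs_num[i])
          {
            dfs (i);
            if (low[i] < low[n])
              low[n] = low[i];
          }
        else if (on_stack[i] && dfs_num[i] < low[n])
          low[n] = dfs_num[i];
      }
  if (low[n] == dfs_num[n])
    {
      /* Pop a complete SCC.  All operands outside of it already have
         value numbers, so the SCC can now be iterated to a fixpoint
         in isolation.  */
      int m;
      printf ("SCC:");
      do
        {
          m = stack[--sp];
          on_stack[m] = 0;
          printf (" %d", m);
        }
      while (m != n);
      printf ("\n");
    }
}
#endif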
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;
/* Nonzero if we may insert new expressions and SSA names into the
   hash tables, as done during FRE's expression insertion.  */
static bool may_insert;
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
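
/* Illustrative only: value ids for constants are allocated lazily and
   are stable, so looking the same constant up twice yields the same
   id.  A hypothetical usage sketch, assuming integer_zero_node has
   not been seen before:  */
#if 0
  unsigned int id1 = get_or_alloc_constant_value_id (integer_zero_node);
  unsigned int id2 = get_constant_value_id (integer_zero_node);
  /* Both lookups agree, and the id is flagged as a constant's id.  */
  gcc_assert (id1 == id2 && value_id_constant_p (id1));
#endif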
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;
  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_OPERAND (ref, 2) == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
            {
              /* Record field as operand.  */
              temp.op0 = TREE_OPERAND (ref, 1);
              temp.op1 = TREE_OPERAND (ref, 2);
            }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case EXC_PTR_EXPR:
        case FILTER_EXPR:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
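
/* Illustrative only: a hypothetical debug helper, not part of GCC,
   showing how the routine above flattens a reference tree, outermost
   component first, into a vector of operand structures.  */
#if 0
static void
debug_reference_ops (tree ref)
{
  VEC (vn_reference_op_s, heap) *ops = NULL;
  vn_reference_op_t vro;
  unsigned i;

  copy_reference_ops_from_ref (ref, &ops);
  /* For a reference like p->a[i] this prints: array_ref
     component_ref indirect_ref ssa_name, i.e. the access path from
     outermost to innermost.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, vro); i++)
    fprintf (stderr, "%s ", tree_code_name[vro->opcode]);
  fprintf (stderr, "\n");
  VEC_free (vn_reference_op_s, heap, ops);
}
#endif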
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          /* We cannot re-construct our fancy union reference handling.  */
          if (TREE_CODE (op->op0) == INTEGER_CST)
            return NULL_TREE;
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case FILTER_EXPR:
        case EXC_PTR_EXPR:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = NULL_TREE;
      aref.op2 = NULL_TREE;
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree ref = *refp;
  tree base;
  HOST_WIDE_INT offset, size, maxsize;

  base = get_ref_base_and_extent (ref, &offset, &size, &maxsize);

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (TREE_TYPE (ref))
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        return vn_reference_insert (ref,
                                    fold_convert (TREE_TYPE (ref),
                                                  integer_zero_node), vuse);
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (TREE_TYPE (ref))
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        return vn_reference_insert (ref,
                                    fold_convert (TREE_TYPE (ref),
                                                  integer_zero_node), vuse);
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;
      VEC_free (vn_reference_op_s, heap, lhs);

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);
      *refp = get_ref_from_reference_ops (vr->operands);
      if (!*refp)
        return (void *)-1;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
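
/* Illustrative only: the kill test used repeatedly above, with
   concrete numbers.  covers_p is a hypothetical helper, not part of
   GCC; offsets and sizes are in bits, as returned by
   get_ref_base_and_extent.  A memset of 16 bytes at offset 0 kills a
   4-byte load at byte 8 of the same base because 0 <= 64 and
   0 + 128 >= 64 + 32.  */
#if 0
static int
covers_p (HOST_WIDE_INT offset, HOST_WIDE_INT maxsize,
          HOST_WIDE_INT offset2, HOST_WIDE_INT size2)
{
  /* The store at [offset2, offset2 + size2) covers every bit the
     load at [offset, offset + maxsize) may touch.  */
  return offset2 <= offset && offset2 + size2 >= offset + maxsize;
}
#endif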
/* Lookup a reference operation by its parts in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      tree ref = get_ref_from_reference_ops (operands);
      if (ref)
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
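
/* Illustrative only: the operand swap above canonicalizes commutative
   operations before hashing, so the two orderings of a commutative
   expression hash and compare equal.  A hypothetical sketch, assuming
   a, b, type, result and value_id are in scope:  */
#if 0
  vn_nary_op_insert_pieces (2, PLUS_EXPR, type, a, b, NULL_TREE, NULL_TREE,
                            result, value_id);
  /* Looking up the swapped form finds the entry just inserted.  */
  gcc_assert (vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, b, a,
                                        NULL_TREE, NULL_TREE, NULL)
              == result);
#endif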
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
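
/* Illustrative only: with the equality above, two PHIs in the same
   block whose valueized argument lists match are given the same value
   number.  For hypothetical GIMPLE like

       <bb 4>:
       x_3 = PHI <a_1(2), b_2(3)>
       y_4 = PHI <a_1(2), b_2(3)>

   vn_phi_lookup on the second PHI finds the entry inserted for the
   first one, so y_4 is value numbered to x_3.  */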
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
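
/* Illustrative only, with hypothetical names: the return value of
   set_ssa_val_to is what drives the SCC iteration to a fixpoint.  For
   an SSA name x_1 whose value turns out to be the constant zero:  */
#if 0
  /* First visit: the stored value number changes.  */
  changed = set_ssa_val_to (x_1, integer_zero_node);  /* returns true */
  /* Later visits that compute the same value report no change, so
     the enclosing SCC iteration can terminate.  */
  changed = set_ssa_val_to (x_1, integer_zero_node);  /* returns false */
#endif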
/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result && may_insert)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, gimple_vuse (stmt));
    }

  return changed;
}
1949 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1950 and return true if the value number of the LHS has changed as a result. */
1952 static bool
1953 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1955 bool changed = false;
1956 tree result;
1957 bool resultsame = false;
1959 /* First we want to lookup using the *vuses* from the store and see
1960 if the last store to this location with the same address had
1961 the same value.
1963 The vuses represent the memory state before the store. If the
1964 memory state, address, and value of the store are the same as the
1965 last store to this location, then this store will produce the
1966 same memory state as that store.
1968 In this case the vdef versions for this store are value numbered to those
1969 vuse versions, since they represent the same memory state after
1970 this store.
1972 Otherwise, the vdefs for the store are used when inserting into
1973 the table, since the store generates a new memory state. */
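/* An illustrative sketch (SSA names are made up): for

     *p_1 = x_2;    # .MEM_4 = VDEF <.MEM_3>
     ...
     *p_1 = x_2;    # .MEM_6 = VDEF <.MEM_4>

   the lookup through vuse .MEM_4 finds the first store of x_2 to the
   same address, so the second store is redundant and its vdef .MEM_6
   is value-numbered to the value of .MEM_4.  */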
1975 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
1977 if (result)
1979 if (TREE_CODE (result) == SSA_NAME)
1980 result = SSA_VAL (result);
1981 if (TREE_CODE (op) == SSA_NAME)
1982 op = SSA_VAL (op);
1983 resultsame = expressions_equal_p (result, op);
1986 if (!result || !resultsame)
1988 tree vdef;
1990 if (dump_file && (dump_flags & TDF_DETAILS))
1992 fprintf (dump_file, "No store match\n");
1993 fprintf (dump_file, "Value numbering store ");
1994 print_generic_expr (dump_file, lhs, 0);
1995 fprintf (dump_file, " to ");
1996 print_generic_expr (dump_file, op, 0);
1997 fprintf (dump_file, "\n");
1999 /* Have to set value numbers before insert, since insert is
2000 going to valueize the references in-place. */
2001 if ((vdef = gimple_vdef (stmt)))
2003 VN_INFO (vdef)->use_processed = true;
2004 changed |= set_ssa_val_to (vdef, vdef);
2007 /* Do not insert structure copies into the tables. */
2008 if (is_gimple_min_invariant (op)
2009 || is_gimple_reg (op))
2010 vn_reference_insert (lhs, op, vdef);
2012 else
2014 /* We had a match, so value number the vdef to have the value
2015 number of the vuse it came from. */
2016 tree def, use;
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 fprintf (dump_file, "Store matched earlier value,"
2020 "value numbering store vdefs to matching vuses.\n");
2022 def = gimple_vdef (stmt);
2023 use = gimple_vuse (stmt);
2025 VN_INFO (def)->use_processed = true;
2026 changed |= set_ssa_val_to (def, SSA_VAL (use));
2029 return changed;
2032 /* Visit and value number PHI; return true if the value number
2033 changed. */
2035 static bool
2036 visit_phi (gimple phi)
2038 bool changed = false;
2039 tree result;
2040 tree sameval = VN_TOP;
2041 bool allsame = true;
2042 unsigned i;
2044 /* TODO: We could check for this in init_sccvn, and replace this
2045 with a gcc_assert. */
2046 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2047 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2049 /* See if all non-TOP arguments have the same value. TOP is
2050 equivalent to everything, so we can ignore it. */
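/* E.g. (illustrative): for x_4 = PHI <x_2, x_3> where both x_2 and x_3
   are value-numbered to x_1, all arguments agree and the PHI result
   x_4 gets value number x_1 as well.  */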
2051 for (i = 0; i < gimple_phi_num_args (phi); i++)
2053 tree def = PHI_ARG_DEF (phi, i);
2055 if (TREE_CODE (def) == SSA_NAME)
2056 def = SSA_VAL (def);
2057 if (def == VN_TOP)
2058 continue;
2059 if (sameval == VN_TOP)
2061 sameval = def;
2063 else
2065 if (!expressions_equal_p (def, sameval))
2067 allsame = false;
2068 break;
2073 /* If all arguments were value numbered to the same value, the
2074 phi node has that value. */
2075 if (allsame)
2077 if (is_gimple_min_invariant (sameval))
2079 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2080 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2082 else
2084 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2085 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2088 if (TREE_CODE (sameval) == SSA_NAME)
2089 return visit_copy (PHI_RESULT (phi), sameval);
2091 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2094 /* Otherwise, see if it is equivalent to a phi node in this block. */
2095 result = vn_phi_lookup (phi);
2096 if (result)
2098 if (TREE_CODE (result) == SSA_NAME)
2099 changed = visit_copy (PHI_RESULT (phi), result);
2100 else
2101 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2103 else
2105 vn_phi_insert (phi, PHI_RESULT (phi));
2106 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2107 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2108 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2111 return changed;
2114 /* Return true if EXPR contains constants. */
2116 static bool
2117 expr_has_constants (tree expr)
2119 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2121 case tcc_unary:
2122 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2124 case tcc_binary:
2125 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2126 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2127 /* Constants inside reference ops are rarely interesting, but
2128 it can take a lot of looking to find them. */
2129 case tcc_reference:
2130 case tcc_declaration:
2131 return false;
2132 default:
2133 return is_gimple_min_invariant (expr);
2135 return false;
2138 /* Return true if STMT contains constants. */
2140 static bool
2141 stmt_has_constants (gimple stmt)
2143 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2144 return false;
2146 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2148 case GIMPLE_UNARY_RHS:
2149 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2151 case GIMPLE_BINARY_RHS:
2152 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2153 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2154 case GIMPLE_SINGLE_RHS:
2155 /* Constants inside reference ops are rarely interesting, but
2156 it can take a lot of looking to find them. */
2157 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2158 default:
2159 gcc_unreachable ();
2161 return false;
2164 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2165 result.
2166 This is performed in place. */
2168 static tree
2169 valueize_expr (tree expr)
2171 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2173 case tcc_unary:
2174 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2175 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2176 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2177 break;
2178 case tcc_binary:
2179 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2180 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2181 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2182 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2183 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2184 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2185 break;
2186 default:
2187 break;
2189 return expr;
2192 /* Simplify the binary expression on the RHS of STMT, and return the
2193 result if simplified. */
2195 static tree
2196 simplify_binary_expression (gimple stmt)
2198 tree result = NULL_TREE;
2199 tree op0 = gimple_assign_rhs1 (stmt);
2200 tree op1 = gimple_assign_rhs2 (stmt);
2202 /* This will not catch every single case we could combine, but will
2203 catch those with constants. The goal here is to simultaneously
2204 combine constants between expressions, but avoid infinite
2205 expansion of expressions during simplification. */
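/* An illustrative sketch: if a_1 has the constant expression 4
   recorded and STMT is b_2 = a_1 + 1, we hand fold the expression
   4 + 1 and can value-number b_2 to the constant 5.  */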
2206 if (TREE_CODE (op0) == SSA_NAME)
2208 if (VN_INFO (op0)->has_constants
2209 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2210 op0 = valueize_expr (vn_get_expr_for (op0));
2211 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2212 op0 = SSA_VAL (op0);
2215 if (TREE_CODE (op1) == SSA_NAME)
2217 if (VN_INFO (op1)->has_constants)
2218 op1 = valueize_expr (vn_get_expr_for (op1));
2219 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2220 op1 = SSA_VAL (op1);
2223 /* Avoid folding if nothing changed. */
2224 if (op0 == gimple_assign_rhs1 (stmt)
2225 && op1 == gimple_assign_rhs2 (stmt))
2226 return NULL_TREE;
2228 fold_defer_overflow_warnings ();
2230 result = fold_binary (gimple_assign_rhs_code (stmt),
2231 gimple_expr_type (stmt), op0, op1);
2232 if (result)
2233 STRIP_USELESS_TYPE_CONVERSION (result);
2235 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2236 stmt, 0);
2238 /* Make sure the result is not a complex expression consisting
2239 of operators of operators (i.e. (a + b) + (a + c)).
2240 Otherwise, we will end up with unbounded expressions if
2241 fold does anything at all. */
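/* E.g. (illustrative): if d_1 is replaced by its recorded expression
   a_3 + b_4, folding d_1 + e_2 could yield (a_3 + b_4) + e_2, which is
   not a valid GIMPLE rhs, so NULL_TREE is returned instead.  */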
2242 if (result && valid_gimple_rhs_p (result))
2243 return result;
2245 return NULL_TREE;
2248 /* Simplify the unary expression on the RHS of STMT, and return the
2249 result if simplified. */
2251 static tree
2252 simplify_unary_expression (gimple stmt)
2254 tree result = NULL_TREE;
2255 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2257 /* We handle some tcc_reference codes here that are all
2258 GIMPLE_ASSIGN_SINGLE codes. */
2259 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2260 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2261 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2262 op0 = TREE_OPERAND (op0, 0);
2264 if (TREE_CODE (op0) != SSA_NAME)
2265 return NULL_TREE;
2267 orig_op0 = op0;
2268 if (VN_INFO (op0)->has_constants)
2269 op0 = valueize_expr (vn_get_expr_for (op0));
2270 else if (gimple_assign_cast_p (stmt)
2271 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2272 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2273 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2275 /* We want to do tree-combining on conversion-like expressions.
2276 Make sure we feed only SSA_NAMEs or constants to fold though. */
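/* E.g. (illustrative): for tmp_2 = (short int) x_1; y_3 = (int) tmp_2;
   substituting the recorded expression for tmp_2 lets fold combine the
   two conversions into one.  */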
2277 tree tem = valueize_expr (vn_get_expr_for (op0));
2278 if (UNARY_CLASS_P (tem)
2279 || BINARY_CLASS_P (tem)
2280 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2281 || TREE_CODE (tem) == SSA_NAME
2282 || is_gimple_min_invariant (tem))
2283 op0 = tem;
2286 /* Avoid folding if nothing changed. */
2287 if (op0 == orig_op0)
2288 return NULL_TREE;
2290 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2291 gimple_expr_type (stmt), op0);
2292 if (result)
2294 STRIP_USELESS_TYPE_CONVERSION (result);
2295 if (valid_gimple_rhs_p (result))
2296 return result;
2299 return NULL_TREE;
2302 /* Try to simplify the RHS of STMT using equivalences and constant folding. */
2304 static tree
2305 try_to_simplify (gimple stmt)
2307 tree tem;
2309 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2310 in this case; there is no point in doing extra work. */
2311 if (gimple_assign_copy_p (stmt)
2312 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2313 return NULL_TREE;
2315 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2317 case tcc_declaration:
2318 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2319 if (tem)
2320 return tem;
2321 break;
2323 case tcc_reference:
2324 /* Do not do full-blown reference lookup here, but simplify
2325 reads from constant aggregates. */
2326 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2327 if (tem)
2328 return tem;
2330 /* Fallthrough for some codes that can operate on registers. */
2331 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2332 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2333 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2334 break;
2335 /* We could do a little more with unary ops, if they expand
2336 into binary ops, but it's debatable whether it is worth it. */
2337 case tcc_unary:
2338 return simplify_unary_expression (stmt);
2339 break;
2340 case tcc_comparison:
2341 case tcc_binary:
2342 return simplify_binary_expression (stmt);
2343 break;
2344 default:
2345 break;
2348 return NULL_TREE;
2351 /* Visit and value number USE; return true if the value number
2352 changed. */
2354 static bool
2355 visit_use (tree use)
2357 bool changed = false;
2358 gimple stmt = SSA_NAME_DEF_STMT (use);
2360 VN_INFO (use)->use_processed = true;
2362 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2363 if (dump_file && (dump_flags & TDF_DETAILS)
2364 && !SSA_NAME_IS_DEFAULT_DEF (use))
2366 fprintf (dump_file, "Value numbering ");
2367 print_generic_expr (dump_file, use, 0);
2368 fprintf (dump_file, " stmt = ");
2369 print_gimple_stmt (dump_file, stmt, 0, 0);
2372 /* Handle uninitialized uses. */
2373 if (SSA_NAME_IS_DEFAULT_DEF (use))
2374 changed = set_ssa_val_to (use, use);
2375 else
2377 if (gimple_code (stmt) == GIMPLE_PHI)
2378 changed = visit_phi (stmt);
2379 else if (!gimple_has_lhs (stmt)
2380 || gimple_has_volatile_ops (stmt)
2381 || stmt_could_throw_p (stmt))
2382 changed = defs_to_varying (stmt);
2383 else if (is_gimple_assign (stmt))
2385 tree lhs = gimple_assign_lhs (stmt);
2386 tree simplified;
2388 /* Shortcut for copies. Simplifying copies is pointless,
2389 since we copy the expression and value they represent. */
2390 if (gimple_assign_copy_p (stmt)
2391 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2392 && TREE_CODE (lhs) == SSA_NAME)
2394 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2395 goto done;
2397 simplified = try_to_simplify (stmt);
2398 if (simplified)
2400 if (dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file, "RHS ");
2403 print_gimple_expr (dump_file, stmt, 0, 0);
2404 fprintf (dump_file, " simplified to ");
2405 print_generic_expr (dump_file, simplified, 0);
2406 if (TREE_CODE (lhs) == SSA_NAME)
2407 fprintf (dump_file, " has constants %d\n",
2408 expr_has_constants (simplified));
2409 else
2410 fprintf (dump_file, "\n");
2413 /* Setting value numbers to constants will occasionally
2414 screw up phi congruence because constants are not
2415 uniquely associated with a single ssa name that can be
2416 looked up. */
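/* E.g. (illustrative): if both i_1 and j_2 get the value number 1, a
   lookup of the constant 1 cannot tell the two names apart, unlike a
   lookup of an SSA_NAME value number.  */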
2417 if (simplified
2418 && is_gimple_min_invariant (simplified)
2419 && TREE_CODE (lhs) == SSA_NAME)
2421 VN_INFO (lhs)->expr = simplified;
2422 VN_INFO (lhs)->has_constants = true;
2423 changed = set_ssa_val_to (lhs, simplified);
2424 goto done;
2426 else if (simplified
2427 && TREE_CODE (simplified) == SSA_NAME
2428 && TREE_CODE (lhs) == SSA_NAME)
2430 changed = visit_copy (lhs, simplified);
2431 goto done;
2433 else if (simplified)
2435 if (TREE_CODE (lhs) == SSA_NAME)
2437 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2438 /* We have to unshare the expression or else
2439 valueizing may change the IL stream. */
2440 VN_INFO (lhs)->expr = unshare_expr (simplified);
2443 else if (stmt_has_constants (stmt)
2444 && TREE_CODE (lhs) == SSA_NAME)
2445 VN_INFO (lhs)->has_constants = true;
2446 else if (TREE_CODE (lhs) == SSA_NAME)
2448 /* We reset expr and constantness here because we may
2449 have been value numbering optimistically, and
2450 iterating. They may become non-constant in this case,
2451 even if they were optimistically constant. */
2453 VN_INFO (lhs)->has_constants = false;
2454 VN_INFO (lhs)->expr = NULL_TREE;
2457 if ((TREE_CODE (lhs) == SSA_NAME
2458 /* We can substitute SSA_NAMEs that are live over
2459 abnormal edges with their constant value. */
2460 && !(gimple_assign_copy_p (stmt)
2461 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2462 && !(simplified
2463 && is_gimple_min_invariant (simplified))
2464 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2465 /* Stores or copies from SSA_NAMEs that are live over
2466 abnormal edges are a problem. */
2467 || (gimple_assign_single_p (stmt)
2468 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2469 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2470 changed = defs_to_varying (stmt);
2471 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2473 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2475 else if (TREE_CODE (lhs) == SSA_NAME)
2477 if ((gimple_assign_copy_p (stmt)
2478 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2479 || (simplified
2480 && is_gimple_min_invariant (simplified)))
2482 VN_INFO (lhs)->has_constants = true;
2483 if (simplified)
2484 changed = set_ssa_val_to (lhs, simplified);
2485 else
2486 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2488 else
2490 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2492 case GIMPLE_UNARY_RHS:
2493 changed = visit_unary_op (lhs, stmt);
2494 break;
2495 case GIMPLE_BINARY_RHS:
2496 changed = visit_binary_op (lhs, stmt);
2497 break;
2498 case GIMPLE_SINGLE_RHS:
2499 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2501 case tcc_reference:
2502 /* VOP-less references can go through unary case. */
2503 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2504 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2505 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2506 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2508 changed = visit_unary_op (lhs, stmt);
2509 break;
2511 /* Fallthrough. */
2512 case tcc_declaration:
2513 changed = visit_reference_op_load
2514 (lhs, gimple_assign_rhs1 (stmt), stmt);
2515 break;
2516 case tcc_expression:
2517 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2519 changed = visit_unary_op (lhs, stmt);
2520 break;
2522 /* Fallthrough. */
2523 default:
2524 changed = defs_to_varying (stmt);
2526 break;
2527 default:
2528 changed = defs_to_varying (stmt);
2529 break;
2533 else
2534 changed = defs_to_varying (stmt);
2536 else if (is_gimple_call (stmt))
2538 tree lhs = gimple_call_lhs (stmt);
2540 /* ??? We could try to simplify calls. */
2542 if (stmt_has_constants (stmt)
2543 && TREE_CODE (lhs) == SSA_NAME)
2544 VN_INFO (lhs)->has_constants = true;
2545 else if (TREE_CODE (lhs) == SSA_NAME)
2547 /* We reset expr and constantness here because we may
2548 have been value numbering optimistically, and
2549 iterating. They may become non-constant in this case,
2550 even if they were optimistically constant. */
2551 VN_INFO (lhs)->has_constants = false;
2552 VN_INFO (lhs)->expr = NULL_TREE;
2555 if (TREE_CODE (lhs) == SSA_NAME
2556 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2557 changed = defs_to_varying (stmt);
2558 /* ??? We should handle stores from calls. */
2559 else if (TREE_CODE (lhs) == SSA_NAME)
2561 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2562 changed = visit_reference_op_call (lhs, stmt);
2563 else
2564 changed = defs_to_varying (stmt);
2566 else
2567 changed = defs_to_varying (stmt);
2570 done:
2571 return changed;
2574 /* Compare two operands by reverse postorder index. */
2576 static int
2577 compare_ops (const void *pa, const void *pb)
2579 const tree opa = *((const tree *)pa);
2580 const tree opb = *((const tree *)pb);
2581 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2582 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2583 basic_block bba;
2584 basic_block bbb;
2586 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2587 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2588 else if (gimple_nop_p (opstmta))
2589 return -1;
2590 else if (gimple_nop_p (opstmtb))
2591 return 1;
2593 bba = gimple_bb (opstmta);
2594 bbb = gimple_bb (opstmtb);
2596 if (!bba && !bbb)
2597 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2598 else if (!bba)
2599 return -1;
2600 else if (!bbb)
2601 return 1;
2603 if (bba == bbb)
2605 if (gimple_code (opstmta) == GIMPLE_PHI
2606 && gimple_code (opstmtb) == GIMPLE_PHI)
2607 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2608 else if (gimple_code (opstmta) == GIMPLE_PHI)
2609 return -1;
2610 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2611 return 1;
2612 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2613 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2614 else
2615 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2617 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2620 /* Sort an array containing members of a strongly connected component
2621 SCC so that the members are ordered by RPO number.
2622 This means that when the sort is complete, iterating through the
2623 array will give you the members in RPO order. */
2625 static void
2626 sort_scc (VEC (tree, heap) *scc)
2628 qsort (VEC_address (tree, scc),
2629 VEC_length (tree, scc),
2630 sizeof (tree),
2631 compare_ops);
2634 /* Process a strongly connected component in the SSA graph. */
2636 static void
2637 process_scc (VEC (tree, heap) *scc)
2639 /* If the SCC has a single member, just visit it. */
2641 if (VEC_length (tree, scc) == 1)
2643 tree use = VEC_index (tree, scc, 0);
2644 if (!VN_INFO (use)->use_processed)
2645 visit_use (use);
2647 else
2649 tree var;
2650 unsigned int i;
2651 unsigned int iterations = 0;
2652 bool changed = true;
2654 /* Iterate over the SCC with the optimistic table until it stops
2655 changing. */
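/* An illustrative sketch: for the cycle

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0;

   the first iteration optimistically assumes i_1 == 0 (the only
   non-TOP argument), folds i_2 to 0 as well, and the next iteration
   confirms the guess, so i_1 and i_2 both get value number 0.  */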
2656 current_info = optimistic_info;
2657 while (changed)
2659 changed = false;
2660 iterations++;
2661 /* As we are value-numbering optimistically we have to
2662 clear the expression tables and the simplified expressions
2663 in each iteration until we converge. */
2664 htab_empty (optimistic_info->nary);
2665 htab_empty (optimistic_info->phis);
2666 htab_empty (optimistic_info->references);
2667 obstack_free (&optimistic_info->nary_obstack, NULL);
2668 gcc_obstack_init (&optimistic_info->nary_obstack);
2669 empty_alloc_pool (optimistic_info->phis_pool);
2670 empty_alloc_pool (optimistic_info->references_pool);
2671 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2672 VN_INFO (var)->expr = NULL_TREE;
2673 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2674 changed |= visit_use (var);
2677 statistics_histogram_event (cfun, "SCC iterations", iterations);
2679 /* Finally, visit the SCC once using the valid table. */
2680 current_info = valid_info;
2681 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2682 visit_use (var);
2686 DEF_VEC_O(ssa_op_iter);
2687 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2689 /* Pop the components of the found SCC for NAME off the SCC stack
2690 and process them. Returns true if all went well, false if
2691 we run into resource limits. */
2693 static bool
2694 extract_and_process_scc_for_name (tree name)
2696 VEC (tree, heap) *scc = NULL;
2697 tree x;
2699 /* We found an SCC, so pop the components off the SCC stack and
2700 process them. */
2703 x = VEC_pop (tree, sccstack);
2705 VN_INFO (x)->on_sccstack = false;
2706 VEC_safe_push (tree, heap, scc, x);
2707 } while (x != name);
2709 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2710 if (VEC_length (tree, scc)
2711 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2713 if (dump_file)
2714 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2715 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2716 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2717 return false;
2720 if (VEC_length (tree, scc) > 1)
2721 sort_scc (scc);
2723 if (dump_file && (dump_flags & TDF_DETAILS))
2724 print_scc (dump_file, scc);
2726 process_scc (scc);
2728 VEC_free (tree, heap, scc);
2730 return true;
2733 /* Depth first search on NAME to discover and process SCCs in the SSA
2734 graph.
2735 Execution of this algorithm relies on the fact that the SCCs are
2736 popped off the stack in topological order.
2737 Returns true if successful, false if we stopped processing SCCs due
2738 to resource constraints. */
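/* An illustrative sketch of the SCC detection (names are made up): for

     x_1 = PHI <a_2, y_3>
     y_3 = x_1 + 1;

   the DFS from x_1 visits y_3, and the use of x_1 in y_3 propagates
   x_1's dfsnum back as the low-link, so when the walk returns to x_1,
   low equals dfsnum and the pair {x_1, y_3} is popped off sccstack and
   processed as one SCC.  */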
2740 static bool
2741 DFS (tree name)
2743 VEC(ssa_op_iter, heap) *itervec = NULL;
2744 VEC(tree, heap) *namevec = NULL;
2745 use_operand_p usep = NULL;
2746 gimple defstmt;
2747 tree use;
2748 ssa_op_iter iter;
2750 start_over:
2751 /* SCC info */
2752 VN_INFO (name)->dfsnum = next_dfs_num++;
2753 VN_INFO (name)->visited = true;
2754 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2756 VEC_safe_push (tree, heap, sccstack, name);
2757 VN_INFO (name)->on_sccstack = true;
2758 defstmt = SSA_NAME_DEF_STMT (name);
2760 /* Recursively DFS on our operands, looking for SCCs. */
2761 if (!gimple_nop_p (defstmt))
2763 /* Push a new iterator. */
2764 if (gimple_code (defstmt) == GIMPLE_PHI)
2765 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2766 else
2767 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2769 else
2770 clear_and_done_ssa_iter (&iter);
2772 while (1)
2774 /* If we are done processing uses of a name, go up the stack
2775 of iterators and process SCCs as we found them. */
2776 if (op_iter_done (&iter))
2778 /* See if we found an SCC. */
2779 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2780 if (!extract_and_process_scc_for_name (name))
2782 VEC_free (tree, heap, namevec);
2783 VEC_free (ssa_op_iter, heap, itervec);
2784 return false;
2787 /* Check if we are done. */
2788 if (VEC_empty (tree, namevec))
2790 VEC_free (tree, heap, namevec);
2791 VEC_free (ssa_op_iter, heap, itervec);
2792 return true;
2795 /* Restore the last use walker and continue walking there. */
2796 use = name;
2797 name = VEC_pop (tree, namevec);
2798 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2799 sizeof (ssa_op_iter));
2800 VEC_pop (ssa_op_iter, itervec);
2801 goto continue_walking;
2804 use = USE_FROM_PTR (usep);
2806 /* Since we handle phi nodes, we will sometimes get
2807 invariants in the use expression. */
2808 if (TREE_CODE (use) == SSA_NAME)
2810 if (! (VN_INFO (use)->visited))
2812 /* Recurse by pushing the current use walking state on
2813 the stack and starting over. */
2814 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2815 VEC_safe_push(tree, heap, namevec, name);
2816 name = use;
2817 goto start_over;
2819 continue_walking:
2820 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2821 VN_INFO (use)->low);
2823 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2824 && VN_INFO (use)->on_sccstack)
2826 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2827 VN_INFO (name)->low);
2831 usep = op_iter_next_use (&iter);
2835 /* Allocate a value number table. */
2837 static void
2838 allocate_vn_table (vn_tables_t table)
2840 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2841 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2842 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2843 free_reference);
2845 gcc_obstack_init (&table->nary_obstack);
2846 table->phis_pool = create_alloc_pool ("VN phis",
2847 sizeof (struct vn_phi_s),
2848 30);
2849 table->references_pool = create_alloc_pool ("VN references",
2850 sizeof (struct vn_reference_s),
2851 30);
2854 /* Free a value number table. */
2856 static void
2857 free_vn_table (vn_tables_t table)
2859 htab_delete (table->phis);
2860 htab_delete (table->nary);
2861 htab_delete (table->references);
2862 obstack_free (&table->nary_obstack, NULL);
2863 free_alloc_pool (table->phis_pool);
2864 free_alloc_pool (table->references_pool);
2867 static void
2868 init_scc_vn (void)
2870 size_t i;
2871 int j;
2872 int *rpo_numbers_temp;
2874 calculate_dominance_info (CDI_DOMINATORS);
2875 sccstack = NULL;
2876 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2877 free);
2879 constant_value_ids = BITMAP_ALLOC (NULL);
2881 next_dfs_num = 1;
2882 next_value_id = 1;
2884 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2885 /* VEC_alloc doesn't actually grow it to the right size; it just
2886 preallocates the space to do so. */
2887 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2888 gcc_obstack_init (&vn_ssa_aux_obstack);
2890 shared_lookup_phiargs = NULL;
2891 shared_lookup_references = NULL;
2892 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2893 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2894 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2896 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
2897 means that the i'th block in RPO order is bb. We want to map bbs
2898 to RPO numbers, so we need to invert this array. */
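/* E.g. (illustrative): if rpo_numbers_temp is {2, 3, 5}, block 2 is
   first in RPO order, so we set rpo_numbers[2] = 0, rpo_numbers[3] = 1
   and rpo_numbers[5] = 2.  */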
2899 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2900 rpo_numbers[rpo_numbers_temp[j]] = j;
2902 XDELETE (rpo_numbers_temp);
2904 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2906 /* Create the VN_INFO structures, and initialize value numbers to
2907 TOP. */
2908 for (i = 0; i < num_ssa_names; i++)
2910 tree name = ssa_name (i);
2911 if (name)
2913 VN_INFO_GET (name)->valnum = VN_TOP;
2914 VN_INFO (name)->expr = NULL_TREE;
2915 VN_INFO (name)->value_id = 0;
2919 renumber_gimple_stmt_uids ();
2921 /* Create the valid and optimistic value numbering tables. */
2922 valid_info = XCNEW (struct vn_tables_s);
2923 allocate_vn_table (valid_info);
2924 optimistic_info = XCNEW (struct vn_tables_s);
2925 allocate_vn_table (optimistic_info);
2928 void
2929 free_scc_vn (void)
2931 size_t i;
2933 htab_delete (constant_to_value_id);
2934 BITMAP_FREE (constant_value_ids);
2935 VEC_free (tree, heap, shared_lookup_phiargs);
2936 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2937 XDELETEVEC (rpo_numbers);
2939 for (i = 0; i < num_ssa_names; i++)
2941 tree name = ssa_name (i);
2942 if (name
2943 && VN_INFO (name)->needs_insertion)
2944 release_ssa_name (name);
2946 obstack_free (&vn_ssa_aux_obstack, NULL);
2947 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2949 VEC_free (tree, heap, sccstack);
2950 free_vn_table (valid_info);
2951 XDELETE (valid_info);
2952 free_vn_table (optimistic_info);
2953 XDELETE (optimistic_info);
2956 /* Set the value ids in the valid hash tables. */
2958 static void
2959 set_hashtable_value_ids (void)
2961 htab_iterator hi;
2962 vn_nary_op_t vno;
2963 vn_reference_t vr;
2964 vn_phi_t vp;
2966 /* Now set the value ids of the things we had put in the hash
2967 table. */
2969 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2970 vno, vn_nary_op_t, hi)
2972 if (vno->result)
2974 if (TREE_CODE (vno->result) == SSA_NAME)
2975 vno->value_id = VN_INFO (vno->result)->value_id;
2976 else if (is_gimple_min_invariant (vno->result))
2977 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2981 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2982 vp, vn_phi_t, hi)
2984 if (vp->result)
2986 if (TREE_CODE (vp->result) == SSA_NAME)
2987 vp->value_id = VN_INFO (vp->result)->value_id;
2988 else if (is_gimple_min_invariant (vp->result))
2989 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2993 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2994 vr, vn_reference_t, hi)
2996 if (vr->result)
2998 if (TREE_CODE (vr->result) == SSA_NAME)
2999 vr->value_id = VN_INFO (vr->result)->value_id;
3000 else if (is_gimple_min_invariant (vr->result))
3001 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3006 /* Do SCCVN. Returns true if it finished, false if we bailed out
3007 due to resource constraints. */
3009 bool
3010 run_scc_vn (bool may_insert_arg)
3012 size_t i;
3013 tree param;
3014 bool changed = true;
3016 may_insert = may_insert_arg;
3018 init_scc_vn ();
3019 current_info = valid_info;
3021 for (param = DECL_ARGUMENTS (current_function_decl);
3022 param;
3023 param = TREE_CHAIN (param))
3025 if (gimple_default_def (cfun, param) != NULL)
3027 tree def = gimple_default_def (cfun, param);
3028 VN_INFO (def)->valnum = def;
3032 for (i = 1; i < num_ssa_names; ++i)
3034 tree name = ssa_name (i);
3035 if (name
3036 && VN_INFO (name)->visited == false
3037 && !has_zero_uses (name))
3038 if (!DFS (name))
3040 free_scc_vn ();
3041 may_insert = false;
3042 return false;
3046 /* Initialize the value ids. */
3048 for (i = 1; i < num_ssa_names; ++i)
3050 tree name = ssa_name (i);
3051 vn_ssa_aux_t info;
3052 if (!name)
3053 continue;
3054 info = VN_INFO (name);
3055 if (info->valnum == name
3056 || info->valnum == VN_TOP)
3057 info->value_id = get_next_value_id ();
3058 else if (is_gimple_min_invariant (info->valnum))
3059 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3062 /* Propagate value ids until they stop changing. */
3063 while (changed)
3065 changed = false;
3066 for (i = 1; i < num_ssa_names; ++i)
3068 tree name = ssa_name (i);
3069 vn_ssa_aux_t info;
3070 if (!name)
3071 continue;
3072 info = VN_INFO (name);
3073 if (TREE_CODE (info->valnum) == SSA_NAME
3074 && info->valnum != name
3075 && info->value_id != VN_INFO (info->valnum)->value_id)
3077 changed = true;
3078 info->value_id = VN_INFO (info->valnum)->value_id;
3083 set_hashtable_value_ids ();
3085 if (dump_file && (dump_flags & TDF_DETAILS))
3087 fprintf (dump_file, "Value numbers:\n");
3088 for (i = 0; i < num_ssa_names; i++)
3090 tree name = ssa_name (i);
3091 if (name
3092 && VN_INFO (name)->visited
3093 && SSA_VAL (name) != name)
3095 print_generic_expr (dump_file, name, 0);
3096 fprintf (dump_file, " = ");
3097 print_generic_expr (dump_file, SSA_VAL (name), 0);
3098 fprintf (dump_file, "\n");
3103 may_insert = false;
3104 return true;
3107 /* Return the maximum value id we have ever seen. */
3109 unsigned int
3110 get_max_value_id (void)
3112 return next_value_id;
3115 /* Return the next unique value id. */
3117 unsigned int
3118 get_next_value_id (void)
3120 return next_value_id++;
3124 /* Compare two expressions E1 and E2 and return true if they are equal. */
3126 bool
3127 expressions_equal_p (tree e1, tree e2)
3129 /* The obvious case. */
3130 if (e1 == e2)
3131 return true;
3133 /* If only one of them is null, they cannot be equal. */
3134 if (!e1 || !e2)
3135 return false;
3137 /* Recurse on elements of lists. */
3138 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3140 tree lop1 = e1;
3141 tree lop2 = e2;
3142 for (lop1 = e1, lop2 = e2;
3143 lop1 || lop2;
3144 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3146 if (!lop1 || !lop2)
3147 return false;
3148 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3149 return false;
3151 return true;
3154 /* Now perform the actual comparison. */
3155 if (TREE_CODE (e1) == TREE_CODE (e2)
3156 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3157 return true;
3159 return false;
3163 /* Return true if the nary operation NARY may trap. This is a copy
3164 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
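/* An illustrative sketch: a signed integer addition may trap when
   TYPE_OVERFLOW_TRAPS holds for its type (e.g. with -ftrapv), and a
   division whose divisor may be zero can trap as well; both cases are
   detected via operation_could_trap_helper_p below.  */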
3166 bool
3167 vn_nary_may_trap (vn_nary_op_t nary)
3169 tree type;
3170 tree rhs2;
3171 bool honor_nans = false;
3172 bool honor_snans = false;
3173 bool fp_operation = false;
3174 bool honor_trapv = false;
3175 bool handled, ret;
3176 unsigned i;
3178 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3179 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3180 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3182 type = nary->type;
3183 fp_operation = FLOAT_TYPE_P (type);
3184 if (fp_operation)
3186 honor_nans = flag_trapping_math && !flag_finite_math_only;
3187 honor_snans = flag_signaling_nans != 0;
3189 else if (INTEGRAL_TYPE_P (type)
3190 && TYPE_OVERFLOW_TRAPS (type))
3191 honor_trapv = true;
3193 rhs2 = nary->op[1];
3194 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3195 honor_trapv,
3196 honor_nans, honor_snans, rhs2,
3197 &handled);
3198 if (handled
3199 && ret)
3200 return true;
3202 for (i = 0; i < nary->length; ++i)
3203 if (tree_could_trap_p (nary->op[i]))
3204 return true;
3206 return false;