/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
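
/* Editorial example (not part of the original sources): for a simple
   counted loop such as

     i_1 = PHI <0 (preheader), i_2 (latch)>
     j_3 = PHI <0 (preheader), j_4 (latch)>
     i_2 = i_1 + 1;
     j_4 = j_3 + 1;

   the names i_1, i_2, j_3 and j_4 form a single SCC in the SSA graph.
   The optimistic iteration first assumes i_1 == j_3, from which
   i_2 == j_4 follows; re-visiting the SCC does not invalidate the
   assumption, so both induction variables end up with the same value
   numbers.  Alternative 1 would reach the same fixpoint, but only
   after re-walking every block of the function per iteration.  */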
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
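
/* Editorial example (not from the original sources): if x_1 is value
   numbered to y_2 and y_2 is defined by y_2 = a_3 + 1, then
   vn_get_expr_for (x_1) builds and caches the expression a_3 + 1 for
   callers to use in simplification; for a value number that is a
   constant, a PHI result or a default definition it simply returns
   the value number itself.  */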
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
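
/* Editorial example (not from the original sources): the reference
   a.f[i_1] decomposes outermost-to-innermost into the operand vector

     { ARRAY_REF     <op0 = i_1, op1 = low bound, op2 = element size>,
       COMPONENT_REF <op0 = FIELD_DECL f>,
       VAR_DECL      <op0 = a> }

   so two syntactically different trees for the same access compare
   equal operand-by-operand after valueization.  */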
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
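
/* Editorial sketch (not from the original sources): for the operand
   vector of s.c[2], where field c starts at byte 4 and has 4-byte
   elements, the loop above accumulates

     offset = 4 * BITS_PER_UNIT            (COMPONENT_REF)
            + (2 - 0) * 4 * BITS_PER_UNIT  (ARRAY_REF)

   i.e. a 96-bit offset from the start of s, with size and max_size
   taken from the access mode.  A non-constant index instead degrades
   max_size to -1.  */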
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
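
/* Editorial example (not from the original sources): if p_1 is value
   numbered to &a.b, the valueized ops for *p_1 momentarily contain the
   pair INDIRECT_REF, ADDR_EXPR <&a.b>; the routine above splices in
   the ops for a.b instead, so the indirect load shares a value number
   with direct loads from a.b.  */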
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, size, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  size = ref->size;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
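
/* Editorial usage sketch (not from the original sources): callers
   typically look a load up with

     tree val = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

   as visit_reference_op_load below does; passing maywalk == true
   enables the alias walk through vn_reference_lookup_2/_3 above.  */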
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
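
/* Editorial example (not from the original sources): because the
   operands of a commutative opcode are valueized and canonically
   ordered in place before hashing, a_1 + b_2 and b_2 + a_1 produce
   identical hashcodes and compare equal, so they hit the same nary
   hash table entry.  */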
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
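
/* Editorial example (not from the original sources): within one block,

     x_3 = PHI <a_1, VN_TOP>
     y_4 = PHI <a_1, b_2>

   compare equal under vn_phi_eq because argument positions where
   either side is still the optimistic VN_TOP are skipped; if a later
   iteration refines the VN_TOP argument to something other than b_2,
   the equivalence dies with the discarded optimistic table.  */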
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
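
/* Editorial note (not from the original sources): during SCC
   iteration a name's value only moves down the lattice, e.g. from the
   initial VN_TOP to an optimistic equivalence

     set_ssa_val_to (x_1, y_2);   first iteration: "changed"
     set_ssa_val_to (x_1, y_2);   fixpoint: no change

   and the boolean result is what drives re-iteration of the SCC.  */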
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
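
/* Editorial example (not from the original sources): given

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   visiting x_1 misses in the nary table, so x_1 becomes its own value
   number and (PLUS_EXPR, a_2, b_3) -> x_1 is inserted; visiting y_4
   then hits that entry and y_4 is unified with x_1.  */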
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

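/* The same redundancy detection works for pure and const calls; as a
   made-up example,

     x_1 = strlen (s_2);
     y_3 = strlen (s_2);

   produces the same vn_reference_s hash for both calls (same
   valueized vuse and operands), so the second lookup returns x_1 and
   y_3 is value numbered to it.  */
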
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, gimple_vuse (stmt));
    }

  return changed;
}

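/* As an illustration of the type-punning path above (a made-up
   fragment):

     union { int i; float f; } u;
     u.i = i_1;
     x_2 = u.f;

   The load of u.f finds the store of i_1 at the same offset and size,
   but with a mismatched type, so x_2 is value numbered to
   VIEW_CONVERT_EXPR <float> (i_1), possibly via a fresh SSA name when
   may_insert is true.  */
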
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

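/* For illustration (hypothetical dump form): in

     # .MEM_3 = VDEF <.MEM_1>
     p_2->x = i_4;
     # .MEM_5 = VDEF <.MEM_3>
     p_2->x = i_4;

   the second store looks up p_2->x in the pre-store memory state
   .MEM_3, finds the same value i_4, and thus value numbers .MEM_5 to
   SSA_VAL (.MEM_3), exposing the store as redundant.  */
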
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

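/* Two hypothetical cases for the code above:

     x_1 = PHI <y_2 (3), y_2 (4)>

   has all non-TOP arguments equal, so x_1 is handled like a copy of
   y_2; whereas for

     x_1 = PHI <a_2 (3), b_3 (4)>
     y_4 = PHI <a_2 (3), b_3 (4)>

   the second PHI is congruent to the first, vn_phi_lookup finds it,
   and y_4 is value numbered to x_1.  */
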
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure the result is not a complex expression consisting of
     operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}

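/* A sketch of the constant combining this enables (made-up names):
   given

     a_1 = b_2 + 1;
     c_3 = a_1 + 2;

   VN_INFO (a_1)->expr is b_2 + 1 and has_constants is set, so the
   second statement is folded as (b_2 + 1) + 2 and simplifies to
   b_2 + 3, combining the constants without ever expanding
   non-constant subexpressions.  */
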
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (gimple_assign_single_p (stmt)
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    {
	      changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		      changed = visit_unary_op (lhs, stmt);
		      break;
		    case GIMPLE_BINARY_RHS:
		      changed = visit_binary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_reference:
			  /* VOP-less references can go through unary case.  */
			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			case tcc_declaration:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ??? We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ??? We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
    }
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
	 VEC_length (tree, scc),
	 sizeof (tree),
	 compare_ops);
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */

  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* As we are value-numbering optimistically we have to
	     clear the expression tables and the simplified expressions
	     in each iteration until we converge.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    VN_INFO (var)->expr = NULL_TREE;
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}

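/* To see why the optimistic iteration converges, consider the made-up
   loop SCC

     x_1 = PHI <0 (2), x_2 (3)>
     x_2 = x_1 + 0;

   The first pass treats the not-yet-visited x_2 as VN_TOP, value
   numbers x_1 to 0 and then x_2 to 0 as well; the second pass computes
   exactly the same values, nothing changes, and the while loop above
   exits with both names known to be 0.  */
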
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;
	    }
continue_walking:
	  VN_INFO (name)->low = MIN (VN_INFO (name)->low,
				     VN_INFO (use)->low);

	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

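/* The function above is an iterative form of Tarjan's SCC algorithm
   applied to the SSA use-def graph: namevec and itervec serve as the
   explicit recursion stack, dfsnum and low are the classic preorder
   number and low-link, and an SCC is popped off sccstack whenever
   low == dfsnum holds for its root name.  */
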
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] is the
     index of the i'th block in RPO order.  We want to map block indices
     to RPO numbers, so we need to invert this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
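  /* For example, with a hypothetical CFG where rpo_numbers_temp is
     {2, 3, 5} -- block 2 first in RPO, then 3, then 5 -- the loop
     above produces rpo_numbers[2] = 0, rpo_numbers[3] = 1 and
     rpo_numbers[5] = 2, the mapping compare_ops expects.  */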
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    {
      if (vno->result)
	{
	  if (TREE_CODE (vno->result) == SSA_NAME)
	    vno->value_id = VN_INFO (vno->result)->value_id;
	  else if (is_gimple_min_invariant (vno->result))
	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    {
      if (vp->result)
	{
	  if (TREE_CODE (vp->result) == SSA_NAME)
	    vp->value_id = VN_INFO (vp->result)->value_id;
	  else if (is_gimple_min_invariant (vp->result))
	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    {
      if (vr->result)
	{
	  if (TREE_CODE (vr->result) == SSA_NAME)
	    vr->value_id = VN_INFO (vr->result)->value_id;
	  else if (is_gimple_min_invariant (vr->result))
	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
	}
    }
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids along value number chains until they stop
     changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  may_insert = false;
  return true;
}

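/* A note on the propagation loop above (hypothetical numbers): if
   a_1's valnum is b_2 and b_2's valnum is b_2 with value id 7, the
   first pass copies 7 to a_1.  A chain of k names needs up to k
   passes, which is why the loop repeats until no value id changes.  */
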
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1 = e1;
      tree lop2 = e2;
      for (lop1 = e1, lop2 = e2;
	   lop1 || lop2;
	   lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
	{
	  if (!lop1 || !lop2)
	    return false;
	  if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
	    return false;
	}
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}