/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
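/* As a small illustration of the SCC walk (all SSA names here are
   made up), consider the loop

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 1;

   i_1 and i_2 form a cycle in the SSA graph, so they make up one SCC
   and are iterated using the optimistic table until their value
   numbers stop changing, while straight-line code feeding the loop
   is value numbered exactly once in reverse postorder.  */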
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
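/* For illustration (hypothetical SSA names): given

     a_1 = b_2 + 1;
     c_3 = a_1;

   and c_3 value numbered to a_1, vn_get_expr_for (c_3) rebuilds and
   caches the expression b_2 + 1 from the defining statement of a_1,
   which folding can then use in place of the bare SSA name.  */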
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case EXC_PTR_EXPR:
        case FILTER_EXPR:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
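/* As a sketch of the decomposition (variable and field names are
   hypothetical), the reference a.x[i_1] becomes, outermost first,

     { opcode = ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = element size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for x }
     { opcode = VAR_DECL,      op0 = a }

   so two equivalent references produce element-wise equal vectors
   comparable with vn_reference_op_eq.  */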
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case FILTER_EXPR:
        case EXC_PTR_EXPR:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
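/* For example (assuming a 32-bit int field f at byte offset 4 in a
   struct s), the operand vector for s.f yields base = s,
   offset = 4 * BITS_PER_UNIT, size = max_size = 32, which is exactly
   the base/extent form the alias oracle uses for disambiguation.  */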
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, size, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  size = ref->size;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;
      VEC_free (vn_reference_op_s, heap, lhs);

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      gcc_assert (ref->size == r.size);
      *ref = r;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
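/* For example (illustrative code), when looking up a.x across the
   aggregate copy in

     a = b;
     ... = a.x;

   the walk does not stop at the copy; the reference a.x is rewritten
   in terms of b and the walk continues from the VUSE of the copy,
   where it may find an earlier store to b.x.  */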
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
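/* Note the canonicalization above means that, e.g., a_1 + b_2 and
   b_2 + a_1 (hypothetical SSA names) hash identically and can then
   be unified by vn_nary_op_eq below.  */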
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
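/* For example (hypothetical PHIs), in a single block

     x_1 = PHI <a_2 (e1), b_3 (e2)>
     y_4 = PHI <a_2 (e1), b_3 (e2)>

   the two entries hash and compare equal (same block, same arguments
   in the same edge order), so y_4 gets the same value number as x_1.  */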
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
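/* For example, a name optimistically value numbered to a constant in
   one iteration may later have to move to a different value; the
   "changed" result from set_ssa_val_to is what forces another
   iteration of the containing SCC.  */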
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
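
/* A sketch of the effect for a pure call (hypothetical names): given
     # VUSE <.MEM_1>
     y_2 = f (x_3);
     ...
     # VUSE <.MEM_1>
     z_4 = f (x_3);
   both calls hash to the same reference -- same valueized arguments
   and the same (valueized) memory state -- so the second lookup finds
   the first entry and z_4 is value numbered to y_2.  */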
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, gimple_vuse (stmt));
    }

  return changed;
}
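
/* As an example of the type-punning path above (illustrative only):
     union { int i; float f; } u;
     u.i = 0x3f800000;
     x_1 = u.f;
   The load of u.f can find the earlier store to u.i because both
   accesses have the same offset and size.  The looked-up value has
   integer type, so it is wrapped in VIEW_CONVERT_EXPR <float>, which
   fold can reduce to the constant 1.0f.  */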
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address had
     the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as
     those of the last store to this location, then this store will
     produce the same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
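
/* Sketch of the matching case (hypothetical virtual operands):
     # .MEM_2 = VDEF <.MEM_1>
     a = x_3;
     ...
     # .MEM_4 = VDEF <.MEM_2>
     a = x_3;
   The second store looks up (a, .MEM_2) and finds the value stored by
   the first store; the value matches, so .MEM_4 is value numbered to
   .MEM_2: the redundant store leaves memory in the same state.  */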
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
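
/* Treating VN_TOP as equal to everything is what makes the optimistic
   iteration useful.  For a loop header PHI (hypothetical names)
     i_1 = PHI <0 (preheader), i_2 (latch)>
   the latch argument i_2 is still VN_TOP on the first visit, so the
   PHI optimistically gets the value 0.  If a later visit finds i_2
   value numbered to something other than 0, a further iteration of
   the SCC revisits the PHI and corrects the result.  */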
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.  This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
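
/* E.g. with SSA_VAL (a_1) == 5, calling valueize_expr on the
   expression a_1 + b_2 rewrites it (in place) to 5 + b_2, so that the
   folders below see the constant.  (Hypothetical operands.)  */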
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure the result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
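
/* Constant combining in action (hypothetical names): for
     x_1 = a_2 + 1;
     y_3 = x_1 + 2;
   x_1 has a constant-containing expression, so op0 becomes a_2 + 1
   and fold_binary sees (a_2 + 1) + 2, which it can combine to
   a_2 + 3 -- a valid GIMPLE rhs that becomes the simplified
   expression for y_3.  */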
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case; there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (gimple_assign_single_p (stmt)
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
	    {
	      changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
		}
	      else
		{
		  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
		    {
		    case GIMPLE_UNARY_RHS:
		      changed = visit_unary_op (lhs, stmt);
		      break;
		    case GIMPLE_BINARY_RHS:
		      changed = visit_binary_op (lhs, stmt);
		      break;
		    case GIMPLE_SINGLE_RHS:
		      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
			{
			case tcc_reference:
			  /* VOP-less references can go through unary case.  */
			  if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
			       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
			       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
			      && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			case tcc_declaration:
			  changed = visit_reference_op_load
			      (lhs, gimple_assign_rhs1 (stmt), stmt);
			  break;
			case tcc_expression:
			  if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
			    {
			      changed = visit_unary_op (lhs, stmt);
			      break;
			    }
			  /* Fallthrough.  */
			default:
			  changed = defs_to_varying (stmt);
			}
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ??? We could try to simplify calls.  */

	  if (stmt_has_constants (stmt)
	      && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */
	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if (TREE_CODE (lhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    changed = defs_to_varying (stmt);
	  /* ??? We should handle stores from calls.  */
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
		changed = visit_reference_op_call (lhs, stmt);
	      else
		changed = defs_to_varying (stmt);
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
    }
 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
	 VEC_length (tree, scc),
	 sizeof (tree),
	 compare_ops);
}
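
/* Visiting the SCC in RPO order matters for convergence speed: values
   tend to flow forward along the reverse postorder, so each pass over
   the sorted SCC propagates facts as far as possible and the
   optimistic iteration below typically needs only a few rounds.  */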
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */

  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* As we are value-numbering optimistically we have to
	     clear the expression tables and the simplified expressions
	     in each iteration until we converge.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    VN_INFO (var)->expr = NULL_TREE;
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}
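
/* Worked example of the optimistic iteration (hypothetical names):
   for the cycle
     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;
   the first pass sees i_2 as VN_TOP at the PHI and optimistically
   sets SSA_VAL (i_1) = 0; folding i_1 + 0 with that value then gives
   SSA_VAL (i_2) = 0 as well.  The second pass re-checks the PHI with
   both arguments equal to 0, nothing changes, and the SCC has
   converged with every member value numbered to the constant 0.  */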
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    }
  while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push (tree, heap, namevec, name);
	      name = use;
	      goto start_over;

	    continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
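
/* This is Tarjan's SCC algorithm, made iterative with an explicit
   name/iterator stack (namevec/itervec) because SSA use chains can be
   deep enough to overflow the call stack.  The low-link bookkeeping
   is the standard one: once all operands of a name have been walked,
   a low value still equal to the name's dfsnum marks that name as the
   root of an SCC, and everything above it on sccstack belongs to the
   component.  */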
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
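
  /* For example, if pre_and_rev_post_order_compute produced the order
     { 2, 3, 5, 4 } (hypothetical block indices), the loop above yields
     rpo_numbers[2] = 0, rpo_numbers[3] = 1, rpo_numbers[5] = 2 and
     rpo_numbers[4] = 3, i.e. a block-index-to-RPO-position map.  */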

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    {
      if (vno->result)
	{
	  if (TREE_CODE (vno->result) == SSA_NAME)
	    vno->value_id = VN_INFO (vno->result)->value_id;
	  else if (is_gimple_min_invariant (vno->result))
	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    {
      if (vp->result)
	{
	  if (TREE_CODE (vp->result) == SSA_NAME)
	    vp->value_id = VN_INFO (vp->result)->value_id;
	  else if (is_gimple_min_invariant (vp->result))
	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    {
      if (vr->result)
	{
	  if (TREE_CODE (vr->result) == SSA_NAME)
	    vr->value_id = VN_INFO (vr->result)->value_id;
	  else if (is_gimple_min_invariant (vr->result))
	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
	}
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  may_insert = false;
  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1 = e1;
      tree lop2 = e2;
      for (lop1 = e1, lop2 = e2;
	   lop1 || lop2;
	   lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
	{
	  if (!lop1 || !lop2)
	    return false;
	  if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
	    return false;
	}
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}