/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "real.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
41 #include "flags.h"
42 #include "bitmap.h"
43 #include "langhooks.h"
44 #include "cfgloop.h"
45 #include "params.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
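/* As a small worked illustration of alternative 2 (the SSA names,
   constants and block numbers below are invented for this example),
   consider the induction variable of a simple counted loop:

       # i_1 = PHI <0(2), i_3(4)>
       i_3 = i_1 + 1;

   i_1 and i_3 form a cycle in the SSA use-def graph and therefore end
   up together in one SCC on the stack.  Their value numbers are
   computed with the optimistic hashtable, re-visiting the members of
   the SCC until none of their value numbers changes; only the operands
   coming from outside the SCC (the constants 0 and 1) are guaranteed
   to have final value numbers when the SCC is popped.  */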
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
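/* For instance (an invented example): if a_2 has value number b_1, and
   b_1 is defined by the assignment b_1 = c_3 + 1, then
   vn_get_expr_for (a_2) builds and caches the expression c_3 + 1,
   which callers can use for further simplification instead of the
   bare SSA name.  */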
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result;
  int i;
  vn_reference_op_t vro;

  result = iterative_hash_expr (vr1->vuse, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                             vro))
      return false;

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && temp.op1 == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
              && host_integerp (DECL_SIZE (temp.op0), 0))
            temp.op0 = DECL_SIZE (temp.op0);
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Record even constant lower bounds.  */
          if (TREE_OPERAND (ref, 2))
            temp.op1 = TREE_OPERAND (ref, 2);
          else
            {
              tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
              if (domain
                  && TYPE_MIN_VALUE (domain)
                  && !integer_zerop (TYPE_MIN_VALUE (domain)))
                temp.op1 = TYPE_MIN_VALUE (domain);
            }
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case EXC_PTR_EXPR:
        case FILTER_EXPR:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
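/* For example (names invented for illustration): the reference
   a.f[i_1] is decomposed outermost-first into the operand vector

       { ARRAY_REF <op0 = i_1>, COMPONENT_REF <op0 = FIELD_DECL f>,
         VAR_DECL <op0 = a> }

   so that two syntactically different trees for the same access
   compare equal operand by operand in vn_reference_eq.  */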
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      if (TREE_CODE (op->op0) == INTEGER_CST)
        size_tree = op->op0;
      else
        size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case CALL_EXPR:
        case ADDR_EXPR:
          return false;

        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
        case FILTER_EXPR:
        case EXC_PTR_EXPR:
          *op0_p = op->op0;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            /* Our union trick, done for offset zero only.  */
            if (TREE_CODE (field) == INTEGER_CST)
              ;
            else if (op->op1
                     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Same for ARRAY_REFs.  We do not have access to the array
             type here, but we recorded the lower bound in op1.  */
          if (op->op2
              || !host_integerp (op->op0, 0)
              || (op->op1 && !host_integerp (op->op1, 0))
              || !host_integerp (TYPE_SIZE (op->type), 1))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              if (op->op1)
                hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (TYPE_SIZE (op->type));
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
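/* A worked example (types invented for illustration): for
   struct S { int a; int b; } s with 32-bit int, the operand vector for
   s.b yields base s, size = max_size = 32 bits (from DECL_SIZE of b),
   and offset = DECL_FIELD_OFFSET (b) * BITS_PER_UNIT
   + DECL_FIELD_BIT_OFFSET (b), i.e. 32 bits on the usual layout.  */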
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
          && TYPE_MIN_VALUE (dom))
        aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = NULL_TREE;
      aref.op2 = NULL_TREE;
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
                   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
                   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
        VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
          /* If it transforms from an SSA_NAME to an address, fold with
             a preceding indirect reference.  */
          if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
              && VEC_index (vn_reference_op_s,
                            orig, i - 1)->opcode == INDIRECT_REF)
            {
              vn_reference_fold_indirect (&orig, &i);
              continue;
            }
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
    }

  return orig;
}
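/* Valueization example (names invented): if p_1 has value number &a,
   the ops for *p_1 start as { INDIRECT_REF, SSA_NAME p_1 }.  Replacing
   p_1 by its value turns the second op into an ADDR_EXPR, and
   vn_reference_fold_indirect then folds the INDIRECT_REF/ADDR_EXPR
   pair into the ops for a itself, so *p_1 and a hash and compare the
   same.  */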
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* Fixup vuse and hash.  The vuse only contributes additively to the
     reference hash (see vn_reference_compute_hash), so we can swap in
     the value number of the new vuse by subtracting the old vuse's
     contribution and adding the new one.  */
  vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
  vr->vuse = SSA_VAL (vuse);
  vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, size, maxsize;

  base = ao_ref_base (ref);
  offset = ref->offset;
  size = ref->size;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;
      VEC_free (vn_reference_op_s, heap, lhs);

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      gcc_assert (ref->size == r.size);
      *ref = r;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
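/* Lookup-through-defs example (invented): if the vuse walk for a load
   from a.b reaches

       memset (&a, 0, sizeof (a));

   then case 1) above applies: the memset covers the full extent of the
   looked-up reference, so the load is given the constant value zero
   (converted to the type of the load) even though no exactly matching
   store was ever inserted into the hash table.  */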
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, vnresult);

  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
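/* Canonicalization example: for a commutative opcode the two operands
   are swapped into canonical order before hashing, so (with invented
   names) a_1 + b_2 and b_2 + a_1 produce the same hashcode and meet in
   the same hash bucket.  */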
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
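/* PHI equality example (names invented): in the same basic block,

       x_3 = PHI <a_1(2), b_2(3)>
       y_4 = PHI <a_1(2), b_2(3)>

   compare equal here because arguments of PHIs in one block are stored
   in the same edge order, so y_4 can be given the value number of
   x_3.  */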
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
1880 /* Visit a unary operator RHS, value number it, and return true if the
1881 value number of LHS has changed as a result. */
1883 static bool
1884 visit_unary_op (tree lhs, gimple stmt)
1886 bool changed = false;
1887 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1889 if (result)
1891 changed = set_ssa_val_to (lhs, result);
1893 else
1895 changed = set_ssa_val_to (lhs, lhs);
1896 vn_nary_op_insert_stmt (stmt, lhs);
1899 return changed;
1902 /* Visit a binary operator RHS, value number it, and return true if the
1903 value number of LHS has changed as a result. */
1905 static bool
1906 visit_binary_op (tree lhs, gimple stmt)
1908 bool changed = false;
1909 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1911 if (result)
1913 changed = set_ssa_val_to (lhs, result);
1915 else
1917 changed = set_ssa_val_to (lhs, lhs);
1918 vn_nary_op_insert_stmt (stmt, lhs);
1921 return changed;
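
/* Sketch, hypothetical: the lookup-or-become-leader pattern shared by
   visit_unary_op and visit_binary_op above, with a toy direct-mapped
   table (-1 marks an empty slot) instead of the real hash tables.  */

static int
sk_visit_nary (int lhs, int expr_key, int *table)
{
  if (table[expr_key] >= 0)
    return table[expr_key];     /* expression already has a leader    */
  table[expr_key] = lhs;        /* otherwise LHS becomes the leader   */
  return lhs;
}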
1924 /* Visit a call STMT storing into LHS. Return true if the value number
1925 of the LHS has changed as a result. */
1927 static bool
1928 visit_reference_op_call (tree lhs, gimple stmt)
1930 bool changed = false;
1931 struct vn_reference_s vr1;
1932 tree result;
1933 tree vuse = gimple_vuse (stmt);
1935 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1936 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1937 vr1.type = gimple_expr_type (stmt);
1938 vr1.set = 0;
1939 vr1.hashcode = vn_reference_compute_hash (&vr1);
1940 result = vn_reference_lookup_1 (&vr1, NULL);
1941 if (result)
1943 changed = set_ssa_val_to (lhs, result);
1944 if (TREE_CODE (result) == SSA_NAME
1945 && VN_INFO (result)->has_constants)
1946 VN_INFO (lhs)->has_constants = true;
1948 else
1950 void **slot;
1951 vn_reference_t vr2;
1952 changed = set_ssa_val_to (lhs, lhs);
1953 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1954 vr2->vuse = vr1.vuse;
1955 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1956 vr2->type = vr1.type;
1957 vr2->set = vr1.set;
1958 vr2->hashcode = vr1.hashcode;
1959 vr2->result = lhs;
1960 slot = htab_find_slot_with_hash (current_info->references,
1961 vr2, vr2->hashcode, INSERT);
1962 if (*slot)
1963 free_reference (*slot);
1964 *slot = vr2;
1967 return changed;
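
/* Sketch, hypothetical: why pure/const call results are keyed on the
   valueized VUSE as well as the operands, as vr1 is above.  Two calls
   share a value number only if their arguments *and* their incoming
   memory states are value-equal.  The fixed two-argument key is a toy
   simplification.  */

struct sk_call_key
{
  int vuse_val;                 /* value number of the incoming VUSE  */
  int arg_val[2];               /* value numbers of the arguments     */
};

static int
sk_call_key_eq (const struct sk_call_key *a, const struct sk_call_key *b)
{
  return (a->vuse_val == b->vuse_val
          && a->arg_val[0] == b->arg_val[0]
          && a->arg_val[1] == b->arg_val[1]);
}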
1970 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1971 and return true if the value number of the LHS has changed as a result. */
1973 static bool
1974 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1976 bool changed = false;
1977 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1979 /* If we have a VIEW_CONVERT_EXPR, try looking up its operand, as
1980 the value might be stored there in a different type. */
1981 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
1982 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
1983 true, NULL);
1985 /* We handle type-punning through unions by value-numbering based
1986 on offset and size of the access. Be prepared to handle a
1987 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1988 if (result
1989 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1991 /* We will be setting the value number of lhs to the value number
1992 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1993 So first simplify and lookup this expression to see if it
1994 is already available. */
1995 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1996 if ((CONVERT_EXPR_P (val)
1997 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1998 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2000 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2001 if ((CONVERT_EXPR_P (tem)
2002 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2003 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2004 TREE_TYPE (val), tem)))
2005 val = tem;
2007 result = val;
2008 if (!is_gimple_min_invariant (val)
2009 && TREE_CODE (val) != SSA_NAME)
2010 result = vn_nary_op_lookup (val, NULL);
2011 /* If the expression is not yet available, value-number lhs to
2012 a new SSA_NAME we create. */
2013 if (!result && may_insert)
2015 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
2016 /* Initialize value-number information properly. */
2017 VN_INFO_GET (result)->valnum = result;
2018 VN_INFO (result)->value_id = get_next_value_id ();
2019 VN_INFO (result)->expr = val;
2020 VN_INFO (result)->has_constants = expr_has_constants (val);
2021 VN_INFO (result)->needs_insertion = true;
2022 /* As all "inserted" statements are singleton SCCs, insert
2023 into the valid table. This is strictly needed to
2024 avoid re-generating new value SSA_NAMEs for the same
2025 expression during SCC iteration over and over (the
2026 optimistic table gets cleared after each iteration).
2027 We do not need to insert into the optimistic table, as
2028 lookups there will fall back to the valid table. */
2029 if (current_info == optimistic_info)
2031 current_info = valid_info;
2032 vn_nary_op_insert (val, result);
2033 current_info = optimistic_info;
2035 else
2036 vn_nary_op_insert (val, result);
2037 if (dump_file && (dump_flags & TDF_DETAILS))
2039 fprintf (dump_file, "Inserting name ");
2040 print_generic_expr (dump_file, result, 0);
2041 fprintf (dump_file, " for expression ");
2042 print_generic_expr (dump_file, val, 0);
2043 fprintf (dump_file, "\n");
2048 if (result)
2050 changed = set_ssa_val_to (lhs, result);
2051 if (TREE_CODE (result) == SSA_NAME
2052 && VN_INFO (result)->has_constants)
2054 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2055 VN_INFO (lhs)->has_constants = true;
2058 else
2060 changed = set_ssa_val_to (lhs, lhs);
2061 vn_reference_insert (op, lhs, gimple_vuse (stmt));
2064 return changed;
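
/* Sketch, hypothetical: the two-table discipline relied on above.
   Lookups consult the current table first and fall back to the valid
   table, so entries for newly inserted names can go straight into the
   valid table and survive the per-iteration clearing of the optimistic
   one.  Linear arrays of sk_entry stand in for the real hash tables.  */

#include <string.h>

struct sk_entry { const char *expr; int val; };

static struct sk_entry sk_opt_tab[8], sk_valid_tab[8];
static int sk_n_opt, sk_n_valid;

static int
sk_two_table_lookup (const char *expr)
{
  int i;
  for (i = 0; i < sk_n_opt; i++)        /* optimistic table first     */
    if (strcmp (sk_opt_tab[i].expr, expr) == 0)
      return sk_opt_tab[i].val;
  for (i = 0; i < sk_n_valid; i++)      /* fall back to valid table   */
    if (strcmp (sk_valid_tab[i].expr, expr) == 0)
      return sk_valid_tab[i].val;
  return -1;                            /* not found                  */
}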
2068 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2069 and return true if the value number of the LHS has changed as a result. */
2071 static bool
2072 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2074 bool changed = false;
2075 tree result;
2076 bool resultsame = false;
2078 /* First we want to look up using the *vuses* from the store and see
2079 whether the last store to this location with the same address
2080 had the same value.
2082 The vuses represent the memory state before the store. If the
2083 memory state, address, and value of the store are the same as those
2084 of the last store to this location, then this store will produce
2085 the same memory state as that store.
2087 In this case the vdef versions for this store are value numbered to those
2088 vuse versions, since they represent the same memory state after
2089 this store.
2091 Otherwise, the vdefs for the store are used when inserting into
2092 the table, since the store generates a new memory state. */
2094 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2096 if (result)
2098 if (TREE_CODE (result) == SSA_NAME)
2099 result = SSA_VAL (result);
2100 if (TREE_CODE (op) == SSA_NAME)
2101 op = SSA_VAL (op);
2102 resultsame = expressions_equal_p (result, op);
2105 if (!result || !resultsame)
2107 tree vdef;
2109 if (dump_file && (dump_flags & TDF_DETAILS))
2111 fprintf (dump_file, "No store match\n");
2112 fprintf (dump_file, "Value numbering store ");
2113 print_generic_expr (dump_file, lhs, 0);
2114 fprintf (dump_file, " to ");
2115 print_generic_expr (dump_file, op, 0);
2116 fprintf (dump_file, "\n");
2118 /* Have to set value numbers before insert, since insert is
2119 going to valueize the references in-place. */
2120 if ((vdef = gimple_vdef (stmt)))
2122 VN_INFO (vdef)->use_processed = true;
2123 changed |= set_ssa_val_to (vdef, vdef);
2126 /* Do not insert structure copies into the tables. */
2127 if (is_gimple_min_invariant (op)
2128 || is_gimple_reg (op))
2129 vn_reference_insert (lhs, op, vdef);
2131 else
2133 /* We had a match, so value number the vdef to have the value
2134 number of the vuse it came from. */
2135 tree def, use;
2137 if (dump_file && (dump_flags & TDF_DETAILS))
2138 fprintf (dump_file, "Store matched earlier value, "
2139 "value numbering store vdefs to matching vuses.\n");
2141 def = gimple_vdef (stmt);
2142 use = gimple_vuse (stmt);
2144 VN_INFO (def)->use_processed = true;
2145 changed |= set_ssa_val_to (def, SSA_VAL (use));
2148 return changed;
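
/* Sketch, hypothetical: the memory-state decision made above, with
   integers standing in for value numbers of VUSE/VDEF names.  If the
   store writes the value the location already held, the store is a
   no-op on memory and the VDEF reuses the VUSE's value number;
   otherwise it names a fresh memory state.  */

static int
sk_store_memstate (int vuse_val, int stored_val, int last_stored_val,
                   int fresh_vdef)
{
  if (stored_val == last_stored_val)
    return vuse_val;            /* same value: memory state unchanged */
  return fresh_vdef;            /* new memory state after the store   */
}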
2151 /* Visit and value number PHI, return true if the value number
2152 changed. */
2154 static bool
2155 visit_phi (gimple phi)
2157 bool changed = false;
2158 tree result;
2159 tree sameval = VN_TOP;
2160 bool allsame = true;
2161 unsigned i;
2163 /* TODO: We could check for this in init_scc_vn, and replace this
2164 with a gcc_assert. */
2165 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2166 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2168 /* See if all non-TOP arguments have the same value. TOP is
2169 equivalent to everything, so we can ignore it. */
2170 for (i = 0; i < gimple_phi_num_args (phi); i++)
2172 tree def = PHI_ARG_DEF (phi, i);
2174 if (TREE_CODE (def) == SSA_NAME)
2175 def = SSA_VAL (def);
2176 if (def == VN_TOP)
2177 continue;
2178 if (sameval == VN_TOP)
2180 sameval = def;
2182 else
2184 if (!expressions_equal_p (def, sameval))
2186 allsame = false;
2187 break;
2192 /* If all arguments value numbered to the same value, the phi node
2193 has that value. */
2194 if (allsame)
2196 if (is_gimple_min_invariant (sameval))
2198 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2199 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2201 else
2203 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2204 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2207 if (TREE_CODE (sameval) == SSA_NAME)
2208 return visit_copy (PHI_RESULT (phi), sameval);
2210 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2213 /* Otherwise, see if it is equivalent to a phi node in this block. */
2214 result = vn_phi_lookup (phi);
2215 if (result)
2217 if (TREE_CODE (result) == SSA_NAME)
2218 changed = visit_copy (PHI_RESULT (phi), result);
2219 else
2220 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2222 else
2224 vn_phi_insert (phi, PHI_RESULT (phi));
2225 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2226 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2227 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2230 return changed;
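
/* Sketch, hypothetical: the "all non-TOP arguments have the same value"
   test performed by visit_phi above, with -1 standing in for VN_TOP and
   -2 signalling that the arguments disagree.  */

static int
sk_phi_value (const int *arg_vals, int nargs)
{
  int i, sameval = -1;                  /* start at TOP               */
  for (i = 0; i < nargs; i++)
    {
      if (arg_vals[i] == -1)
        continue;                       /* TOP is equal to everything */
      if (sameval == -1)
        sameval = arg_vals[i];          /* first non-TOP argument     */
      else if (arg_vals[i] != sameval)
        return -2;                      /* arguments disagree         */
    }
  return sameval;                       /* common value (or TOP)      */
}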
2233 /* Return true if EXPR contains constants. */
2235 static bool
2236 expr_has_constants (tree expr)
2238 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2240 case tcc_unary:
2241 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2243 case tcc_binary:
2244 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2245 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2246 /* Constants inside reference ops are rarely interesting, but
2247 it can take a lot of looking to find them. */
2248 case tcc_reference:
2249 case tcc_declaration:
2250 return false;
2251 default:
2252 return is_gimple_min_invariant (expr);
2254 return false;
2257 /* Return true if STMT contains constants. */
2259 static bool
2260 stmt_has_constants (gimple stmt)
2262 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2263 return false;
2265 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2267 case GIMPLE_UNARY_RHS:
2268 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2270 case GIMPLE_BINARY_RHS:
2271 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2272 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2273 case GIMPLE_SINGLE_RHS:
2274 /* Constants inside reference ops are rarely interesting, but
2275 it can take a lot of looking to find them. */
2276 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2277 default:
2278 gcc_unreachable ();
2280 return false;
2283 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2284 result.
2285 This is performed in place. */
2287 static tree
2288 valueize_expr (tree expr)
2290 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2292 case tcc_unary:
2293 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2294 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2295 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2296 break;
2297 case tcc_binary:
2298 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2299 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2300 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2301 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2302 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2303 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2304 break;
2305 default:
2306 break;
2308 return expr;
2311 /* Simplify the binary expression on the RHS of STMT, and return the
2312 result if simplified. */
2314 static tree
2315 simplify_binary_expression (gimple stmt)
2317 tree result = NULL_TREE;
2318 tree op0 = gimple_assign_rhs1 (stmt);
2319 tree op1 = gimple_assign_rhs2 (stmt);
2321 /* This will not catch every single case we could combine, but will
2322 catch those with constants. The goal here is to simultaneously
2323 combine constants between expressions, but avoid infinite
2324 expansion of expressions during simplification. */
2325 if (TREE_CODE (op0) == SSA_NAME)
2327 if (VN_INFO (op0)->has_constants
2328 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2329 op0 = valueize_expr (vn_get_expr_for (op0));
2330 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2331 op0 = SSA_VAL (op0);
2334 if (TREE_CODE (op1) == SSA_NAME)
2336 if (VN_INFO (op1)->has_constants)
2337 op1 = valueize_expr (vn_get_expr_for (op1));
2338 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2339 op1 = SSA_VAL (op1);
2342 /* Avoid folding if nothing changed. */
2343 if (op0 == gimple_assign_rhs1 (stmt)
2344 && op1 == gimple_assign_rhs2 (stmt))
2345 return NULL_TREE;
2347 fold_defer_overflow_warnings ();
2349 result = fold_binary (gimple_assign_rhs_code (stmt),
2350 gimple_expr_type (stmt), op0, op1);
2351 if (result)
2352 STRIP_USELESS_TYPE_CONVERSION (result);
2354 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2355 stmt, 0);
2357 /* Make sure result is not a complex expression consisting
2358 of operators applied to operators (i.e. (a + b) + (a + c)).
2359 Otherwise, we will end up with unbounded expressions if
2360 fold does anything at all. */
2361 if (result && valid_gimple_rhs_p (result))
2362 return result;
2364 return NULL_TREE;
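
/* Sketch, hypothetical: the valueize-then-fold idea above on a toy
   operand type.  Folding only fires when both operands turned out to
   be constants after substituting their value numbers; anything more
   complex is left alone, in the spirit of the valid_gimple_rhs_p
   guard.  */

struct sk_operand { int is_const; int value; };

static int
sk_fold_plus (struct sk_operand a, struct sk_operand b, int *result)
{
  if (!a.is_const || !b.is_const)
    return 0;                   /* nothing to fold                    */
  *result = a.value + b.value;  /* constant-fold a + b                */
  return 1;
}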
2367 /* Simplify the unary expression on the RHS of STMT, and return the
2368 result if simplified. */
2370 static tree
2371 simplify_unary_expression (gimple stmt)
2373 tree result = NULL_TREE;
2374 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2376 /* We handle some tcc_reference codes here that are all
2377 GIMPLE_ASSIGN_SINGLE codes. */
2378 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2379 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2380 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2381 op0 = TREE_OPERAND (op0, 0);
2383 if (TREE_CODE (op0) != SSA_NAME)
2384 return NULL_TREE;
2386 orig_op0 = op0;
2387 if (VN_INFO (op0)->has_constants)
2388 op0 = valueize_expr (vn_get_expr_for (op0));
2389 else if (gimple_assign_cast_p (stmt)
2390 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2391 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2392 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2394 /* We want to do tree-combining on conversion-like expressions.
2395 Make sure we feed only SSA_NAMEs or constants to fold though. */
2396 tree tem = valueize_expr (vn_get_expr_for (op0));
2397 if (UNARY_CLASS_P (tem)
2398 || BINARY_CLASS_P (tem)
2399 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2400 || TREE_CODE (tem) == SSA_NAME
2401 || is_gimple_min_invariant (tem))
2402 op0 = tem;
2405 /* Avoid folding if nothing changed, but remember the expression. */
2406 if (op0 == orig_op0)
2407 return NULL_TREE;
2409 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2410 gimple_expr_type (stmt), op0);
2411 if (result)
2413 STRIP_USELESS_TYPE_CONVERSION (result);
2414 if (valid_gimple_rhs_p (result))
2415 return result;
2418 return NULL_TREE;
2421 /* Try to simplify the RHS of STMT using equivalences and constant folding. */
2423 static tree
2424 try_to_simplify (gimple stmt)
2426 tree tem;
2428 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2429 in this case; there is no point in doing extra work. */
2430 if (gimple_assign_copy_p (stmt)
2431 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2432 return NULL_TREE;
2434 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2436 case tcc_declaration:
2437 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2438 if (tem)
2439 return tem;
2440 break;
2442 case tcc_reference:
2443 /* Do not do full-blown reference lookup here, but simplify
2444 reads from constant aggregates. */
2445 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2446 if (tem)
2447 return tem;
2449 /* Fallthrough for some codes that can operate on registers. */
2450 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2451 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2452 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2453 break;
2454 /* We could do a little more with unary ops, if they expand
2455 into binary ops, but it's debatable whether it is worth it. */
2456 case tcc_unary:
2457 return simplify_unary_expression (stmt);
2458 break;
2459 case tcc_comparison:
2460 case tcc_binary:
2461 return simplify_binary_expression (stmt);
2462 break;
2463 default:
2464 break;
2467 return NULL_TREE;
2470 /* Visit and value number USE, return true if the value number
2471 changed. */
2473 static bool
2474 visit_use (tree use)
2476 bool changed = false;
2477 gimple stmt = SSA_NAME_DEF_STMT (use);
2479 VN_INFO (use)->use_processed = true;
2481 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2482 if (dump_file && (dump_flags & TDF_DETAILS)
2483 && !SSA_NAME_IS_DEFAULT_DEF (use))
2485 fprintf (dump_file, "Value numbering ");
2486 print_generic_expr (dump_file, use, 0);
2487 fprintf (dump_file, " stmt = ");
2488 print_gimple_stmt (dump_file, stmt, 0, 0);
2491 /* Handle uninitialized uses. */
2492 if (SSA_NAME_IS_DEFAULT_DEF (use))
2493 changed = set_ssa_val_to (use, use);
2494 else
2496 if (gimple_code (stmt) == GIMPLE_PHI)
2497 changed = visit_phi (stmt);
2498 else if (!gimple_has_lhs (stmt)
2499 || gimple_has_volatile_ops (stmt)
2500 || stmt_could_throw_p (stmt))
2501 changed = defs_to_varying (stmt);
2502 else if (is_gimple_assign (stmt))
2504 tree lhs = gimple_assign_lhs (stmt);
2505 tree simplified;
2507 /* Shortcut for copies. Simplifying copies is pointless,
2508 since we copy the expression and value they represent. */
2509 if (gimple_assign_copy_p (stmt)
2510 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2511 && TREE_CODE (lhs) == SSA_NAME)
2513 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2514 goto done;
2516 simplified = try_to_simplify (stmt);
2517 if (simplified)
2519 if (dump_file && (dump_flags & TDF_DETAILS))
2521 fprintf (dump_file, "RHS ");
2522 print_gimple_expr (dump_file, stmt, 0, 0);
2523 fprintf (dump_file, " simplified to ");
2524 print_generic_expr (dump_file, simplified, 0);
2525 if (TREE_CODE (lhs) == SSA_NAME)
2526 fprintf (dump_file, " has constants %d\n",
2527 expr_has_constants (simplified));
2528 else
2529 fprintf (dump_file, "\n");
2532 /* Setting value numbers to constants will occasionally
2533 screw up phi congruence because constants are not
2534 uniquely associated with a single ssa name that can be
2535 looked up. */
2536 if (simplified
2537 && is_gimple_min_invariant (simplified)
2538 && TREE_CODE (lhs) == SSA_NAME)
2540 VN_INFO (lhs)->expr = simplified;
2541 VN_INFO (lhs)->has_constants = true;
2542 changed = set_ssa_val_to (lhs, simplified);
2543 goto done;
2545 else if (simplified
2546 && TREE_CODE (simplified) == SSA_NAME
2547 && TREE_CODE (lhs) == SSA_NAME)
2549 changed = visit_copy (lhs, simplified);
2550 goto done;
2552 else if (simplified)
2554 if (TREE_CODE (lhs) == SSA_NAME)
2556 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2557 /* We have to unshare the expression or else
2558 valueizing may change the IL stream. */
2559 VN_INFO (lhs)->expr = unshare_expr (simplified);
2562 else if (stmt_has_constants (stmt)
2563 && TREE_CODE (lhs) == SSA_NAME)
2564 VN_INFO (lhs)->has_constants = true;
2565 else if (TREE_CODE (lhs) == SSA_NAME)
2567 /* We reset expr and constantness here because we may
2568 have been value numbering optimistically, and
2569 iterating. They may become non-constant in this case,
2570 even if they were optimistically constant. */
2572 VN_INFO (lhs)->has_constants = false;
2573 VN_INFO (lhs)->expr = NULL_TREE;
2576 if ((TREE_CODE (lhs) == SSA_NAME
2577 /* We can substitute SSA_NAMEs that are live over
2578 abnormal edges with their constant value. */
2579 && !(gimple_assign_copy_p (stmt)
2580 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2581 && !(simplified
2582 && is_gimple_min_invariant (simplified))
2583 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2584 /* Stores or copies from SSA_NAMEs that are live over
2585 abnormal edges are a problem. */
2586 || (gimple_assign_single_p (stmt)
2587 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2588 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2589 changed = defs_to_varying (stmt);
2590 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2592 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2594 else if (TREE_CODE (lhs) == SSA_NAME)
2596 if ((gimple_assign_copy_p (stmt)
2597 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2598 || (simplified
2599 && is_gimple_min_invariant (simplified)))
2601 VN_INFO (lhs)->has_constants = true;
2602 if (simplified)
2603 changed = set_ssa_val_to (lhs, simplified);
2604 else
2605 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2607 else
2609 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2611 case GIMPLE_UNARY_RHS:
2612 changed = visit_unary_op (lhs, stmt);
2613 break;
2614 case GIMPLE_BINARY_RHS:
2615 changed = visit_binary_op (lhs, stmt);
2616 break;
2617 case GIMPLE_SINGLE_RHS:
2618 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2620 case tcc_reference:
2621 /* VOP-less references can go through the unary case. */
2622 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2623 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2624 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2625 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2627 changed = visit_unary_op (lhs, stmt);
2628 break;
2630 /* Fallthrough. */
2631 case tcc_declaration:
2632 changed = visit_reference_op_load
2633 (lhs, gimple_assign_rhs1 (stmt), stmt);
2634 break;
2635 case tcc_expression:
2636 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2638 changed = visit_unary_op (lhs, stmt);
2639 break;
2641 /* Fallthrough. */
2642 default:
2643 changed = defs_to_varying (stmt);
2645 break;
2646 default:
2647 changed = defs_to_varying (stmt);
2648 break;
2652 else
2653 changed = defs_to_varying (stmt);
2655 else if (is_gimple_call (stmt))
2657 tree lhs = gimple_call_lhs (stmt);
2659 /* ??? We could try to simplify calls. */
2661 if (stmt_has_constants (stmt)
2662 && TREE_CODE (lhs) == SSA_NAME)
2663 VN_INFO (lhs)->has_constants = true;
2664 else if (TREE_CODE (lhs) == SSA_NAME)
2666 /* We reset expr and constantness here because we may
2667 have been value numbering optimistically, and
2668 iterating. They may become non-constant in this case,
2669 even if they were optimistically constant. */
2670 VN_INFO (lhs)->has_constants = false;
2671 VN_INFO (lhs)->expr = NULL_TREE;
2674 if (TREE_CODE (lhs) == SSA_NAME
2675 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2676 changed = defs_to_varying (stmt);
2677 /* ??? We should handle stores from calls. */
2678 else if (TREE_CODE (lhs) == SSA_NAME)
2680 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2681 changed = visit_reference_op_call (lhs, stmt);
2682 else
2683 changed = defs_to_varying (stmt);
2685 else
2686 changed = defs_to_varying (stmt);
2689 done:
2690 return changed;
2693 /* Compare two operands by reverse postorder index. */
2695 static int
2696 compare_ops (const void *pa, const void *pb)
2698 const tree opa = *((const tree *)pa);
2699 const tree opb = *((const tree *)pb);
2700 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2701 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2702 basic_block bba;
2703 basic_block bbb;
2705 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2706 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2707 else if (gimple_nop_p (opstmta))
2708 return -1;
2709 else if (gimple_nop_p (opstmtb))
2710 return 1;
2712 bba = gimple_bb (opstmta);
2713 bbb = gimple_bb (opstmtb);
2715 if (!bba && !bbb)
2716 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2717 else if (!bba)
2718 return -1;
2719 else if (!bbb)
2720 return 1;
2722 if (bba == bbb)
2724 if (gimple_code (opstmta) == GIMPLE_PHI
2725 && gimple_code (opstmtb) == GIMPLE_PHI)
2726 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2727 else if (gimple_code (opstmta) == GIMPLE_PHI)
2728 return -1;
2729 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2730 return 1;
2731 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2732 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2733 else
2734 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2736 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2739 /* Sort an array containing members of a strongly connected component
2740 SCC so that the members are ordered by RPO number.
2741 This means that when the sort is complete, iterating through the
2742 array will give you the members in RPO order. */
2744 static void
2745 sort_scc (VEC (tree, heap) *scc)
2747 qsort (VEC_address (tree, scc),
2748 VEC_length (tree, scc),
2749 sizeof (tree),
2750 compare_ops);
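
/* Sketch, hypothetical: sort_scc above is plain qsort with a custom
   comparator; the same pattern on ints looks like this.  */

#include <stdlib.h>

static int
sk_cmp_int (const void *pa, const void *pb)
{
  return *(const int *) pa - *(const int *) pb;
}

static void
sk_sort_ints (int *a, int n)
{
  qsort (a, n, sizeof (int), sk_cmp_int);
}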
2753 /* Process a strongly connected component in the SSA graph. */
2755 static void
2756 process_scc (VEC (tree, heap) *scc)
2758 /* If the SCC has a single member, just visit it. */
2760 if (VEC_length (tree, scc) == 1)
2762 tree use = VEC_index (tree, scc, 0);
2763 if (!VN_INFO (use)->use_processed)
2764 visit_use (use);
2766 else
2768 tree var;
2769 unsigned int i;
2770 unsigned int iterations = 0;
2771 bool changed = true;
2773 /* Iterate over the SCC with the optimistic table until it stops
2774 changing. */
2775 current_info = optimistic_info;
2776 while (changed)
2778 changed = false;
2779 iterations++;
2780 /* As we are value-numbering optimistically we have to
2781 clear the expression tables and the simplified expressions
2782 in each iteration until we converge. */
2783 htab_empty (optimistic_info->nary);
2784 htab_empty (optimistic_info->phis);
2785 htab_empty (optimistic_info->references);
2786 obstack_free (&optimistic_info->nary_obstack, NULL);
2787 gcc_obstack_init (&optimistic_info->nary_obstack);
2788 empty_alloc_pool (optimistic_info->phis_pool);
2789 empty_alloc_pool (optimistic_info->references_pool);
2790 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2791 VN_INFO (var)->expr = NULL_TREE;
2792 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2793 changed |= visit_use (var);
2796 statistics_histogram_event (cfun, "SCC iterations", iterations);
2798 /* Finally, visit the SCC once using the valid table. */
2799 current_info = valid_info;
2800 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2801 visit_use (var);
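
/* Sketch, hypothetical: the shape of the optimistic iteration above.
   Every pass revisits each SCC member; the loop stops once no value
   number moved, i.e. the SCC reached a fixpoint.  The table-clearing
   between passes is elided here.  */

#include <stdbool.h>

static void
sk_iterate_scc (const int *members, int n, bool (*visit) (int))
{
  bool changed = true;
  while (changed)
    {
      int i;
      changed = false;
      /* (the real loop clears the optimistic tables here)  */
      for (i = 0; i < n; i++)
        changed |= visit (members[i]);
    }
}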
2805 DEF_VEC_O(ssa_op_iter);
2806 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2808 /* Pop the components of the found SCC for NAME off the SCC stack
2809 and process them. Returns true if all went well, false if
2810 we ran into resource limits. */
2812 static bool
2813 extract_and_process_scc_for_name (tree name)
2815 VEC (tree, heap) *scc = NULL;
2816 tree x;
2818 /* Found an SCC, pop the components off the SCC stack and
2819 process them. */
2822 x = VEC_pop (tree, sccstack);
2824 VN_INFO (x)->on_sccstack = false;
2825 VEC_safe_push (tree, heap, scc, x);
2826 } while (x != name);
2828 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2829 if (VEC_length (tree, scc)
2830 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2832 if (dump_file)
2833 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2834 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2835 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2836 return false;
2839 if (VEC_length (tree, scc) > 1)
2840 sort_scc (scc);
2842 if (dump_file && (dump_flags & TDF_DETAILS))
2843 print_scc (dump_file, scc);
2845 process_scc (scc);
2847 VEC_free (tree, heap, scc);
2849 return true;
2852 /* Depth first search on NAME to discover and process SCCs in the SSA
2853 graph.
2854 Execution of this algorithm relies on the fact that the SCCs are
2855 popped off the stack in topological order.
2856 Returns true if successful, false if we stopped processing SCCs due
2857 to resource constraints. */
2859 static bool
2860 DFS (tree name)
2862 VEC(ssa_op_iter, heap) *itervec = NULL;
2863 VEC(tree, heap) *namevec = NULL;
2864 use_operand_p usep = NULL;
2865 gimple defstmt;
2866 tree use;
2867 ssa_op_iter iter;
2869 start_over:
2870 /* SCC info */
2871 VN_INFO (name)->dfsnum = next_dfs_num++;
2872 VN_INFO (name)->visited = true;
2873 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2875 VEC_safe_push (tree, heap, sccstack, name);
2876 VN_INFO (name)->on_sccstack = true;
2877 defstmt = SSA_NAME_DEF_STMT (name);
2879 /* Recursively DFS on our operands, looking for SCC's. */
2880 if (!gimple_nop_p (defstmt))
2882 /* Push a new iterator. */
2883 if (gimple_code (defstmt) == GIMPLE_PHI)
2884 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2885 else
2886 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2888 else
2889 clear_and_done_ssa_iter (&iter);
2891 while (1)
2893 /* If we are done processing uses of a name, go up the stack
2894 of iterators and process SCCs as we found them. */
2895 if (op_iter_done (&iter))
2897 /* See if we found an SCC. */
2898 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2899 if (!extract_and_process_scc_for_name (name))
2901 VEC_free (tree, heap, namevec);
2902 VEC_free (ssa_op_iter, heap, itervec);
2903 return false;
2906 /* Check if we are done. */
2907 if (VEC_empty (tree, namevec))
2909 VEC_free (tree, heap, namevec);
2910 VEC_free (ssa_op_iter, heap, itervec);
2911 return true;
2914 /* Restore the last use walker and continue walking there. */
2915 use = name;
2916 name = VEC_pop (tree, namevec);
2917 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2918 sizeof (ssa_op_iter));
2919 VEC_pop (ssa_op_iter, itervec);
2920 goto continue_walking;
2923 use = USE_FROM_PTR (usep);
2925 /* Since we handle phi nodes, we will sometimes get
2926 invariants in the use expression. */
2927 if (TREE_CODE (use) == SSA_NAME)
2929 if (! (VN_INFO (use)->visited))
2931 /* Recurse by pushing the current use walking state on
2932 the stack and starting over. */
2933 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2934 VEC_safe_push(tree, heap, namevec, name);
2935 name = use;
2936 goto start_over;
2938 continue_walking:
2939 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2940 VN_INFO (use)->low);
2942 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2943 && VN_INFO (use)->on_sccstack)
2945 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2946 VN_INFO (name)->low);
2950 usep = op_iter_next_use (&iter);
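
/* Sketch, hypothetical and not from GCC: a recursive rendering of the
   Tarjan-style walk that DFS implements above with explicit
   namevec/itervec stacks (the explicit stacks only avoid deep
   recursion; the logic is essentially the same).  Toy graph: SK_N
   nodes, adjacency lists in sk_adj; all sk_* names are invented.  */

#define SK_N 8

static int sk_adj[SK_N][SK_N], sk_deg[SK_N];
static int sk_dfsnum[SK_N], sk_low[SK_N], sk_onstack[SK_N];
static int sk_stack[SK_N], sk_sp, sk_next_dfs = 1;

static void
sk_dfs (int v)
{
  int i;
  sk_dfsnum[v] = sk_low[v] = sk_next_dfs++;
  sk_stack[sk_sp++] = v;
  sk_onstack[v] = 1;

  for (i = 0; i < sk_deg[v]; i++)
    {
      int w = sk_adj[v][i];
      if (!sk_dfsnum[w])                /* unvisited: tree edge       */
        {
          sk_dfs (w);
          if (sk_low[w] < sk_low[v])
            sk_low[v] = sk_low[w];
        }
      else if (sk_onstack[w] && sk_dfsnum[w] < sk_low[v])
        sk_low[v] = sk_dfsnum[w];       /* edge back into the stack   */
    }

  if (sk_low[v] == sk_dfsnum[v])        /* v is the root of an SCC    */
    {
      int w;
      do
        {
          w = sk_stack[--sk_sp];        /* pop one complete SCC       */
          sk_onstack[w] = 0;
          /* (process the SCC member w here)  */
        }
      while (w != v);
    }
}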
2954 /* Allocate a value number table. */
2956 static void
2957 allocate_vn_table (vn_tables_t table)
2959 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2960 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2961 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2962 free_reference);
2964 gcc_obstack_init (&table->nary_obstack);
2965 table->phis_pool = create_alloc_pool ("VN phis",
2966 sizeof (struct vn_phi_s),
2967 30);
2968 table->references_pool = create_alloc_pool ("VN references",
2969 sizeof (struct vn_reference_s),
2970 30);
2973 /* Free a value number table. */
2975 static void
2976 free_vn_table (vn_tables_t table)
2978 htab_delete (table->phis);
2979 htab_delete (table->nary);
2980 htab_delete (table->references);
2981 obstack_free (&table->nary_obstack, NULL);
2982 free_alloc_pool (table->phis_pool);
2983 free_alloc_pool (table->references_pool);
2986 static void
2987 init_scc_vn (void)
2989 size_t i;
2990 int j;
2991 int *rpo_numbers_temp;
2993 calculate_dominance_info (CDI_DOMINATORS);
2994 sccstack = NULL;
2995 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2996 free);
2998 constant_value_ids = BITMAP_ALLOC (NULL);
3000 next_dfs_num = 1;
3001 next_value_id = 1;
3003 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3004 /* VEC_alloc doesn't actually grow it to the right size; it just
3005 preallocates the space to do so. */
3006 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3007 gcc_obstack_init (&vn_ssa_aux_obstack);
3009 shared_lookup_phiargs = NULL;
3010 shared_lookup_references = NULL;
3011 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3012 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3013 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3015 /* rpo_numbers_temp holds the RPO order: rpo_numbers_temp[i] == b means
3016 the i'th block in RPO order has index b. We want the inverse map from
3017 block index to RPO number, so rearrange it (e.g. {2,0,1} -> {1,2,0}). */
3018 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3019 rpo_numbers[rpo_numbers_temp[j]] = j;
3021 XDELETE (rpo_numbers_temp);
3023 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3025 /* Create the VN_INFO structures, and initialize value numbers to
3026 TOP. */
3027 for (i = 0; i < num_ssa_names; i++)
3029 tree name = ssa_name (i);
3030 if (name)
3032 VN_INFO_GET (name)->valnum = VN_TOP;
3033 VN_INFO (name)->expr = NULL_TREE;
3034 VN_INFO (name)->value_id = 0;
3038 renumber_gimple_stmt_uids ();
3040 /* Create the valid and optimistic value numbering tables. */
3041 valid_info = XCNEW (struct vn_tables_s);
3042 allocate_vn_table (valid_info);
3043 optimistic_info = XCNEW (struct vn_tables_s);
3044 allocate_vn_table (optimistic_info);
3047 void
3048 free_scc_vn (void)
3050 size_t i;
3052 htab_delete (constant_to_value_id);
3053 BITMAP_FREE (constant_value_ids);
3054 VEC_free (tree, heap, shared_lookup_phiargs);
3055 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3056 XDELETEVEC (rpo_numbers);
3058 for (i = 0; i < num_ssa_names; i++)
3060 tree name = ssa_name (i);
3061 if (name
3062 && VN_INFO (name)->needs_insertion)
3063 release_ssa_name (name);
3065 obstack_free (&vn_ssa_aux_obstack, NULL);
3066 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3068 VEC_free (tree, heap, sccstack);
3069 free_vn_table (valid_info);
3070 XDELETE (valid_info);
3071 free_vn_table (optimistic_info);
3072 XDELETE (optimistic_info);
3075 /* Set the value ids in the valid hash tables. */
3077 static void
3078 set_hashtable_value_ids (void)
3080 htab_iterator hi;
3081 vn_nary_op_t vno;
3082 vn_reference_t vr;
3083 vn_phi_t vp;
3085 /* Now set the value ids of the things we had put in the hash
3086 table. */
3088 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3089 vno, vn_nary_op_t, hi)
3091 if (vno->result)
3093 if (TREE_CODE (vno->result) == SSA_NAME)
3094 vno->value_id = VN_INFO (vno->result)->value_id;
3095 else if (is_gimple_min_invariant (vno->result))
3096 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3100 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3101 vp, vn_phi_t, hi)
3103 if (vp->result)
3105 if (TREE_CODE (vp->result) == SSA_NAME)
3106 vp->value_id = VN_INFO (vp->result)->value_id;
3107 else if (is_gimple_min_invariant (vp->result))
3108 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3112 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3113 vr, vn_reference_t, hi)
3115 if (vr->result)
3117 if (TREE_CODE (vr->result) == SSA_NAME)
3118 vr->value_id = VN_INFO (vr->result)->value_id;
3119 else if (is_gimple_min_invariant (vr->result))
3120 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3125 /* Do SCCVN. Returns true if it finished, false if we bailed out
3126 due to resource constraints. */
3128 bool
3129 run_scc_vn (bool may_insert_arg)
3131 size_t i;
3132 tree param;
3133 bool changed = true;
3135 may_insert = may_insert_arg;
3137 init_scc_vn ();
3138 current_info = valid_info;
3140 for (param = DECL_ARGUMENTS (current_function_decl);
3141 param;
3142 param = TREE_CHAIN (param))
3144 if (gimple_default_def (cfun, param) != NULL)
3146 tree def = gimple_default_def (cfun, param);
3147 VN_INFO (def)->valnum = def;
3151 for (i = 1; i < num_ssa_names; ++i)
3153 tree name = ssa_name (i);
3154 if (name
3155 && VN_INFO (name)->visited == false
3156 && !has_zero_uses (name))
3157 if (!DFS (name))
3159 free_scc_vn ();
3160 may_insert = false;
3161 return false;
3165 /* Initialize the value ids. */
3167 for (i = 1; i < num_ssa_names; ++i)
3169 tree name = ssa_name (i);
3170 vn_ssa_aux_t info;
3171 if (!name)
3172 continue;
3173 info = VN_INFO (name);
3174 if (info->valnum == name
3175 || info->valnum == VN_TOP)
3176 info->value_id = get_next_value_id ();
3177 else if (is_gimple_min_invariant (info->valnum))
3178 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3181 /* Propagate until they stop changing. */
3182 while (changed)
3184 changed = false;
3185 for (i = 1; i < num_ssa_names; ++i)
3187 tree name = ssa_name (i);
3188 vn_ssa_aux_t info;
3189 if (!name)
3190 continue;
3191 info = VN_INFO (name);
3192 if (TREE_CODE (info->valnum) == SSA_NAME
3193 && info->valnum != name
3194 && info->value_id != VN_INFO (info->valnum)->value_id)
3196 changed = true;
3197 info->value_id = VN_INFO (info->valnum)->value_id;
3202 set_hashtable_value_ids ();
3204 if (dump_file && (dump_flags & TDF_DETAILS))
3206 fprintf (dump_file, "Value numbers:\n");
3207 for (i = 0; i < num_ssa_names; i++)
3209 tree name = ssa_name (i);
3210 if (name
3211 && VN_INFO (name)->visited
3212 && SSA_VAL (name) != name)
3214 print_generic_expr (dump_file, name, 0);
3215 fprintf (dump_file, " = ");
3216 print_generic_expr (dump_file, SSA_VAL (name), 0);
3217 fprintf (dump_file, "\n");
3222 may_insert = false;
3223 return true;
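
/* Sketch, hypothetical: the value-id propagation loop above.  A name
   whose value number is another name copies that name's value id;
   repeating until nothing changes resolves chains of any length.  */

#include <stdbool.h>

static void
sk_propagate_value_ids (const int *valnum, int *value_id, int n)
{
  bool changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n; i++)
        if (valnum[i] != i && value_id[i] != value_id[valnum[i]])
          {
            value_id[i] = value_id[valnum[i]];  /* copy leader's id   */
            changed = true;
          }
    }
}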
3226 /* Return the maximum value id we have ever seen. */
3228 unsigned int
3229 get_max_value_id (void)
3231 return next_value_id;
3234 /* Return the next unique value id. */
3236 unsigned int
3237 get_next_value_id (void)
3239 return next_value_id++;
3243 /* Compare two expressions E1 and E2 and return true if they are equal. */
3245 bool
3246 expressions_equal_p (tree e1, tree e2)
3248 /* The obvious case. */
3249 if (e1 == e2)
3250 return true;
3252 /* If only one of them is null, they cannot be equal. */
3253 if (!e1 || !e2)
3254 return false;
3256 /* Recurse on elements of lists. */
3257 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3259 tree lop1 = e1;
3260 tree lop2 = e2;
3261 for (lop1 = e1, lop2 = e2;
3262 lop1 || lop2;
3263 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3265 if (!lop1 || !lop2)
3266 return false;
3267 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3268 return false;
3270 return true;
3273 /* Now perform the actual comparison. */
3274 if (TREE_CODE (e1) == TREE_CODE (e2)
3275 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3276 return true;
3278 return false;
3282 /* Return true if the nary operation NARY may trap. This is a copy
3283 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3285 bool
3286 vn_nary_may_trap (vn_nary_op_t nary)
3288 tree type;
3289 tree rhs2;
3290 bool honor_nans = false;
3291 bool honor_snans = false;
3292 bool fp_operation = false;
3293 bool honor_trapv = false;
3294 bool handled, ret;
3295 unsigned i;
3297 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3298 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3299 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3301 type = nary->type;
3302 fp_operation = FLOAT_TYPE_P (type);
3303 if (fp_operation)
3305 honor_nans = flag_trapping_math && !flag_finite_math_only;
3306 honor_snans = flag_signaling_nans != 0;
3308 else if (INTEGRAL_TYPE_P (type)
3309 && TYPE_OVERFLOW_TRAPS (type))
3310 honor_trapv = true;
3312 rhs2 = nary->op[1];
3313 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3314 honor_trapv,
3315 honor_nans, honor_snans, rhs2,
3316 &handled);
3317 if (handled
3318 && ret)
3319 return true;
3321 for (i = 0; i < nary->length; ++i)
3322 if (tree_could_trap_p (nary->op[i]))
3323 return true;
3325 return false;