/* SCC value numbering for trees
   Copyright (C) 2006-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

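/* A schematic sketch of alternative 2 above (editorial illustration,
   not compiled code).  It assumes a Tarjan-style DFS has already
   collected one SCC into a vector; visit_use and the table-clearing
   step stand in for the real drivers defined later in this file:

     process one SCC:
       if (scc.length () == 1)
         visit_use (scc[0]);                // no cycle: number it once
       else
         {
           current_info = optimistic_info;
           do
             {
               changed = false;
               clear the optimistic hashtables;
               for each SSA name N in the SCC (in DFS order)
                 changed |= visit_use (N);  // may update SSA_VAL (N)
             }
           while (changed);                 // iterate cycle to a fixpoint
           record the stabilized entries in valid_info;
         }
*/
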
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}

/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type nary;
  vn_phi_table_type phis;
  vn_reference_table_type references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

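/* Illustrative classifications (editorial note): "x_1 = y_2 + 1" is
   VN_NARY, "x_1 = a.b" is VN_REFERENCE, "x_1 = PHI <y_2, z_3>" is
   VN_PHI, a call "x_1 = foo ()" is VN_REFERENCE, and "x_1 = 5"
   reaches the tcc_constant case and is VN_CONSTANT.  */
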
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

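/* Worked example (editorial note): given

     struct S { int x; int y; } a;

   on a target with 4-byte int, the reference a.y and the valueized
   reference MEM[&a + 4] compare equal here: the do-while loop sums
   the known constant offsets on each side into off1 and off2 (both 4)
   and compares the decls behind the trailing ADDR_EXPRs directly,
   rather than demanding operand-by-operand structural identity.  */
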
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = tree_to_double_int (this_offset)
			+ tree_to_double_int (bit_offset)
			  .arshift (BITS_PER_UNIT == 8
				    ? 3 : exact_log2 (BITS_PER_UNIT),
				    HOST_BITS_PER_DOUBLE_INT);
		    if (off.fits_shwi ())
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off += -tree_to_double_int (temp.op1);
	      off *= tree_to_double_int (temp.op2);
	      if (off.fits_shwi ())
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build_fold_addr_expr (ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

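/* As an illustration (editorial note), a load from a.b[i_1] is
   decomposed outermost-first into roughly these operand records:

     { opcode = ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = elt size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for b }
     { opcode = MEM_REF,       op0 = 0,   off = 0 }
     { opcode = ADDR_EXPR,     op0 = &a,  off = -1 }

   where the last two records come from the decl canonicalization
   above that rewrites the plain VAR_DECL base into MEM[&a + 0].  */
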
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

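/* Illustrative before/after (editorial note): given

     ptr_1 = ptr_2 + 4;
     ... = MEM[ptr_1 + 8];

   the POINTER_PLUS_EXPR definition of ptr_1 is folded into the
   constant offset of the preceding MEM_REF operand, leaving the
   operands of MEM[ptr_2 + 12]; the function then recurses in case
   ptr_2 is itself defined by an ADDR_EXPR or another addition.  */
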
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

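/* Two folds this enables (editorial illustration): a builtin call
   whose operands valueized to constants, e.g. __builtin_labs (-5),
   is rebuilt with build_call_expr and folds to 5; and a read like
   "abc"[1], an ARRAY_REF of a STRING_CST with constant index below
   TREE_STRING_LENGTH, folds to the character constant 'b'.  */
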
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off += -tree_to_double_int (vro->op1);
	  off *= tree_to_double_int (vro->op2);
	  if (off.fits_shwi ())
	    vro->off = off.low;
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
			{
			  if (TREE_CODE (TREE_TYPE (elt->value))
			      != VECTOR_TYPE)
			    val = elt->value;
			}
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s>
	  rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1
	  && (lhs_ops[0].off == vr->operands[i].off))
	i--, j--;

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = vNULL;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references.create (0);
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

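/* End-to-end illustration of the walker above (editorial note,
   schematic GIMPLE):

     memset (&a, 0, sizeof (a));   // case 1
     ...
     x_1 = a.f;                    // lookup reaches the memset and yields
                                   // build_zero_cst (TREE_TYPE (a.f))

     b = c;                        // case 5: aggregate copy
     ...
     y_2 = b.g;                    // lookup is translated to c.g and the
                                   // walk continues from there

   In both situations the walk produces a value or a translated
   reference instead of giving up at the first may-def.  */
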
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

1982 tree
1983 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1984 vec<vn_reference_op_s> operands,
1985 vn_reference_t *vnresult, vn_lookup_kind kind)
1987 struct vn_reference_s vr1;
1988 vn_reference_t tmp;
1989 tree cst;
1991 if (!vnresult)
1992 vnresult = &tmp;
1993 *vnresult = NULL;
1995 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1996 shared_lookup_references.truncate (0);
1997 shared_lookup_references.safe_grow (operands.length ());
1998 memcpy (shared_lookup_references.address (),
1999 operands.address (),
2000 sizeof (vn_reference_op_s)
2001 * operands.length ());
2002 vr1.operands = operands = shared_lookup_references
2003 = valueize_refs (shared_lookup_references);
2004 vr1.type = type;
2005 vr1.set = set;
2006 vr1.hashcode = vn_reference_compute_hash (&vr1);
2007 if ((cst = fully_constant_vn_reference_p (&vr1)))
2008 return cst;
2010 vn_reference_lookup_1 (&vr1, vnresult);
2011 if (!*vnresult
2012 && kind != VN_NOWALK
2013 && vr1.vuse)
2015 ao_ref r;
2016 vn_walk_kind = kind;
2017 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2018 *vnresult =
2019 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2020 vn_reference_lookup_2,
2021 vn_reference_lookup_3, &vr1);
2022 if (vr1.operands != operands)
2023 vr1.operands.release ();
2026 if (*vnresult)
2027 return (*vnresult)->result;
2029 return NULL_TREE;
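/* Usage sketch for the entry point above (the variable names here are
   made up):

     vn_reference_t vnres;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
                                            &vnres, VN_WALK);
     if (val)
       ... VAL is the available value for the reference ...  */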
2032 /* Lookup OP in the current hash table, and return the resulting value
2033 number if it exists in the hash table. Return NULL_TREE if it does
2034 not exist in the hash table or if the result field of the structure
2035 was NULL. VNRESULT will be filled in with the vn_reference_t
2036 stored in the hashtable if one exists. */
2038 tree
2039 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2040 vn_reference_t *vnresult)
2042 vec<vn_reference_op_s> operands;
2043 struct vn_reference_s vr1;
2044 tree cst;
2045 bool valuezied_anything;
2047 if (vnresult)
2048 *vnresult = NULL;
2050 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2051 vr1.operands = operands
2052 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2053 vr1.type = TREE_TYPE (op);
2054 vr1.set = get_alias_set (op);
2055 vr1.hashcode = vn_reference_compute_hash (&vr1);
2056 if ((cst = fully_constant_vn_reference_p (&vr1)))
2057 return cst;
2059 if (kind != VN_NOWALK
2060 && vr1.vuse)
2062 vn_reference_t wvnresult;
2063 ao_ref r;
2064 /* Make sure to use a valueized reference if we valueized anything.
2065 Otherwise preserve the full reference for advanced TBAA. */
2066 if (!valuezied_anything
2067 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2068 vr1.operands))
2069 ao_ref_init (&r, op);
2070 vn_walk_kind = kind;
2071 wvnresult =
2072 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2073 vn_reference_lookup_2,
2074 vn_reference_lookup_3, &vr1);
2075 if (vr1.operands != operands)
2076 vr1.operands.release ();
2077 if (wvnresult)
2079 if (vnresult)
2080 *vnresult = wvnresult;
2081 return wvnresult->result;
2084 return NULL_TREE;
2087 return vn_reference_lookup_1 (&vr1, vnresult);
2091 /* Insert OP into the current hash table with a value number of
2092 RESULT, and return the resulting reference structure we created. */
2094 vn_reference_t
2095 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2097 vn_reference_s **slot;
2098 vn_reference_t vr1;
2100 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2101 if (TREE_CODE (result) == SSA_NAME)
2102 vr1->value_id = VN_INFO (result)->value_id;
2103 else
2104 vr1->value_id = get_or_alloc_constant_value_id (result);
2105 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2106 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2107 vr1->type = TREE_TYPE (op);
2108 vr1->set = get_alias_set (op);
2109 vr1->hashcode = vn_reference_compute_hash (vr1);
2110 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2111 vr1->result_vdef = vdef;
2113 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2114 INSERT);
2116 /* Because we look up stores using vuses, and value number failures
2117 using the vdefs (see visit_reference_op_store for how and why),
2118 it's possible that on failure we may try to insert an already
2119 inserted store. This is not wrong; there is no SSA name for a
2120 store that we could use as a differentiator anyway. Thus, unlike
2121 the other lookup functions, you cannot gcc_assert (!*slot)
2122 here. */
2124 /* But free the old slot in case of a collision. */
2125 if (*slot)
2126 free_reference (*slot);
2128 *slot = vr1;
2129 return vr1;
2132 /* Insert a reference by its pieces into the current hash table with
2133 a value number of RESULT. Return the resulting reference
2134 structure we created. */
2136 vn_reference_t
2137 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2138 vec<vn_reference_op_s> operands,
2139 tree result, unsigned int value_id)
2142 vn_reference_s **slot;
2143 vn_reference_t vr1;
2145 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2146 vr1->value_id = value_id;
2147 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2148 vr1->operands = valueize_refs (operands);
2149 vr1->type = type;
2150 vr1->set = set;
2151 vr1->hashcode = vn_reference_compute_hash (vr1);
2152 if (result && TREE_CODE (result) == SSA_NAME)
2153 result = SSA_VAL (result);
2154 vr1->result = result;
2156 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2157 INSERT);
2159 /* At this point we should have all the things inserted that we have
2160 seen before, and we should never try inserting something that
2161 already exists. */
2162 gcc_assert (!*slot);
2163 if (*slot)
2164 free_reference (*slot);
2166 *slot = vr1;
2167 return vr1;
2170 /* Compute and return the hash value for nary operation VNO1. */
2172 hashval_t
2173 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2175 hashval_t hash;
2176 unsigned i;
2178 for (i = 0; i < vno1->length; ++i)
2179 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2180 vno1->op[i] = SSA_VAL (vno1->op[i]);
2182 if (vno1->length == 2
2183 && commutative_tree_code (vno1->opcode)
2184 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2186 tree temp = vno1->op[0];
2187 vno1->op[0] = vno1->op[1];
2188 vno1->op[1] = temp;
2191 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2192 for (i = 0; i < vno1->length; ++i)
2193 hash = iterative_hash_expr (vno1->op[i], hash);
2195 return hash;
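/* For example, because of the canonicalization above,

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   end up with identical operand arrays and thus identical hashes,
   so vn_nary_op_eq below also reports them equal and y_4 receives
   the value number of x_1.  */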
2198 /* Compare nary operations VNO1 and VNO2 and return true if they are
2199 equivalent. */
2201 bool
2202 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2204 unsigned i;
2206 if (vno1->hashcode != vno2->hashcode)
2207 return false;
2209 if (vno1->length != vno2->length)
2210 return false;
2212 if (vno1->opcode != vno2->opcode
2213 || !types_compatible_p (vno1->type, vno2->type))
2214 return false;
2216 for (i = 0; i < vno1->length; ++i)
2217 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2218 return false;
2220 return true;
2223 /* Initialize VNO from the pieces provided. */
2225 static void
2226 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2227 enum tree_code code, tree type, tree *ops)
2229 vno->opcode = code;
2230 vno->length = length;
2231 vno->type = type;
2232 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2235 /* Initialize VNO from OP. */
2237 static void
2238 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2240 unsigned i;
2242 vno->opcode = TREE_CODE (op);
2243 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2244 vno->type = TREE_TYPE (op);
2245 for (i = 0; i < vno->length; ++i)
2246 vno->op[i] = TREE_OPERAND (op, i);
2249 /* Return the number of operands for a vn_nary ops structure from STMT. */
2251 static unsigned int
2252 vn_nary_length_from_stmt (gimple stmt)
2254 switch (gimple_assign_rhs_code (stmt))
2256 case REALPART_EXPR:
2257 case IMAGPART_EXPR:
2258 case VIEW_CONVERT_EXPR:
2259 return 1;
2261 case BIT_FIELD_REF:
2262 return 3;
2264 case CONSTRUCTOR:
2265 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2267 default:
2268 return gimple_num_ops (stmt) - 1;
2272 /* Initialize VNO from STMT. */
2274 static void
2275 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2277 unsigned i;
2279 vno->opcode = gimple_assign_rhs_code (stmt);
2280 vno->type = gimple_expr_type (stmt);
2281 switch (vno->opcode)
2283 case REALPART_EXPR:
2284 case IMAGPART_EXPR:
2285 case VIEW_CONVERT_EXPR:
2286 vno->length = 1;
2287 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2288 break;
2290 case BIT_FIELD_REF:
2291 vno->length = 3;
2292 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2293 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2294 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2295 break;
2297 case CONSTRUCTOR:
2298 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2299 for (i = 0; i < vno->length; ++i)
2300 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2301 break;
2303 default:
2304 gcc_checking_assert (!gimple_assign_single_p (stmt));
2305 vno->length = gimple_num_ops (stmt) - 1;
2306 for (i = 0; i < vno->length; ++i)
2307 vno->op[i] = gimple_op (stmt, i + 1);
2311 /* Compute the hashcode for VNO and look for it in the hash table;
2312 return the resulting value number if it exists in the hash table.
2313 Return NULL_TREE if it does not exist in the hash table or if the
2314 result field of the operation is NULL. VNRESULT will contain the
2315 vn_nary_op_t from the hashtable if it exists. */
2317 static tree
2318 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2320 vn_nary_op_s **slot;
2322 if (vnresult)
2323 *vnresult = NULL;
2325 vno->hashcode = vn_nary_op_compute_hash (vno);
2326 slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2327 if (!slot && current_info == optimistic_info)
2328 slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2329 if (!slot)
2330 return NULL_TREE;
2331 if (vnresult)
2332 *vnresult = *slot;
2333 return (*slot)->result;
2336 /* Lookup an n-ary operation by its pieces and return the resulting value
2337 number if it exists in the hash table. Return NULL_TREE if it does
2338 not exist in the hash table or if the result field of the operation
2339 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2340 if it exists. */
2342 tree
2343 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2344 tree type, tree *ops, vn_nary_op_t *vnresult)
2346 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2347 sizeof_vn_nary_op (length));
2348 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2349 return vn_nary_op_lookup_1 (vno1, vnresult);
2352 /* Lookup OP in the current hash table, and return the resulting value
2353 number if it exists in the hash table. Return NULL_TREE if it does
2354 not exist in the hash table or if the result field of the operation
2355 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2356 if it exists. */
2358 tree
2359 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2361 vn_nary_op_t vno1
2362 = XALLOCAVAR (struct vn_nary_op_s,
2363 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2364 init_vn_nary_op_from_op (vno1, op);
2365 return vn_nary_op_lookup_1 (vno1, vnresult);
2368 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2369 value number if it exists in the hash table. Return NULL_TREE if
2370 it does not exist in the hash table. VNRESULT will contain the
2371 vn_nary_op_t from the hashtable if it exists. */
2373 tree
2374 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2376 vn_nary_op_t vno1
2377 = XALLOCAVAR (struct vn_nary_op_s,
2378 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2379 init_vn_nary_op_from_stmt (vno1, stmt);
2380 return vn_nary_op_lookup_1 (vno1, vnresult);
2383 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2385 static vn_nary_op_t
2386 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2388 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2391 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2392 obstack. */
2394 static vn_nary_op_t
2395 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2397 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2398 &current_info->nary_obstack);
2400 vno1->value_id = value_id;
2401 vno1->length = length;
2402 vno1->result = result;
2404 return vno1;
2407 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2408 VNO->HASHCODE first. */
2410 static vn_nary_op_t
2411 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
2412 bool compute_hash)
2414 vn_nary_op_s **slot;
2416 if (compute_hash)
2417 vno->hashcode = vn_nary_op_compute_hash (vno);
2419 slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
2420 gcc_assert (!*slot);
2422 *slot = vno;
2423 return vno;
2426 /* Insert an n-ary operation into the current hash table using its
2427 pieces. Return the vn_nary_op_t structure we created and put in
2428 the hashtable. */
2430 vn_nary_op_t
2431 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2432 tree type, tree *ops,
2433 tree result, unsigned int value_id)
2435 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2436 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2437 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2440 /* Insert OP into the current hash table with a value number of
2441 RESULT. Return the vn_nary_op_t structure we created and put in
2442 the hashtable. */
2444 vn_nary_op_t
2445 vn_nary_op_insert (tree op, tree result)
2447 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2448 vn_nary_op_t vno1;
2450 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2451 init_vn_nary_op_from_op (vno1, op);
2452 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2455 /* Insert the rhs of STMT into the current hash table with a value number of
2456 RESULT. */
2458 vn_nary_op_t
2459 vn_nary_op_insert_stmt (gimple stmt, tree result)
2461 vn_nary_op_t vno1
2462 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2463 result, VN_INFO (result)->value_id);
2464 init_vn_nary_op_from_stmt (vno1, stmt);
2465 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2468 /* Compute a hashcode for PHI operation VP1 and return it. */
2470 static inline hashval_t
2471 vn_phi_compute_hash (vn_phi_t vp1)
2473 hashval_t result;
2474 int i;
2475 tree phi1op;
2476 tree type;
2478 result = vp1->block->index;
2480 /* If all PHI arguments are constants we need to distinguish
2481 the PHI node via its type. */
2482 type = vp1->type;
2483 result += vn_hash_type (type);
2485 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2487 if (phi1op == VN_TOP)
2488 continue;
2489 result = iterative_hash_expr (phi1op, result);
2492 return result;
2495 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2497 static int
2498 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2500 if (vp1->hashcode != vp2->hashcode)
2501 return false;
2503 if (vp1->block == vp2->block)
2505 int i;
2506 tree phi1op;
2508 /* If the PHI nodes do not have compatible types
2509 they are not the same. */
2510 if (!types_compatible_p (vp1->type, vp2->type))
2511 return false;
2513 /* Any phi in the same block will have its arguments in the
2514 same edge order, because of how we store phi nodes. */
2515 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2517 tree phi2op = vp2->phiargs[i];
2518 if (phi1op == VN_TOP || phi2op == VN_TOP)
2519 continue;
2520 if (!expressions_equal_p (phi1op, phi2op))
2521 return false;
2523 return true;
2525 return false;
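/* For example (a sketch), two PHIs in the same block

     x_1 = PHI <a_5(3), b_6(4)>
     y_2 = PHI <a_5(3), b_6(4)>

   hash and compare equal, so y_2 gets value numbered to x_1.
   VN_TOP arguments act as wildcards; PHI <a_5(3), VN_TOP(4)>
   still compares equal to the PHIs above.  */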
2528 static vec<tree> shared_lookup_phiargs;
2530 /* Lookup PHI in the current hash table, and return the resulting
2531 value number if it exists in the hash table. Return NULL_TREE if
2532 it does not exist in the hash table. */
2534 static tree
2535 vn_phi_lookup (gimple phi)
2537 vn_phi_s **slot;
2538 struct vn_phi_s vp1;
2539 unsigned i;
2541 shared_lookup_phiargs.truncate (0);
2543 /* Canonicalize the SSA_NAME's to their value number. */
2544 for (i = 0; i < gimple_phi_num_args (phi); i++)
2546 tree def = PHI_ARG_DEF (phi, i);
2547 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2548 shared_lookup_phiargs.safe_push (def);
2550 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2551 vp1.phiargs = shared_lookup_phiargs;
2552 vp1.block = gimple_bb (phi);
2553 vp1.hashcode = vn_phi_compute_hash (&vp1);
2554 slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2555 if (!slot && current_info == optimistic_info)
2556 slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2557 if (!slot)
2558 return NULL_TREE;
2559 return (*slot)->result;
2562 /* Insert PHI into the current hash table with a value number of
2563 RESULT. */
2565 static vn_phi_t
2566 vn_phi_insert (gimple phi, tree result)
2568 vn_phi_s **slot;
2569 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2570 unsigned i;
2571 vec<tree> args = vNULL;
2573 /* Canonicalize the SSA_NAME's to their value number. */
2574 for (i = 0; i < gimple_phi_num_args (phi); i++)
2576 tree def = PHI_ARG_DEF (phi, i);
2577 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2578 args.safe_push (def);
2580 vp1->value_id = VN_INFO (result)->value_id;
2581 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2582 vp1->phiargs = args;
2583 vp1->block = gimple_bb (phi);
2584 vp1->result = result;
2585 vp1->hashcode = vn_phi_compute_hash (vp1);
2587 slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2589 /* Because we iterate over phi operations more than once, it's
2590 possible the slot might already exist here, hence no assert.  */
2591 *slot = vp1;
2592 return vp1;
2596 /* Print set of components in strongly connected component SCC to OUT. */
2598 static void
2599 print_scc (FILE *out, vec<tree> scc)
2601 tree var;
2602 unsigned int i;
2604 fprintf (out, "SCC consists of:");
2605 FOR_EACH_VEC_ELT (scc, i, var)
2607 fprintf (out, " ");
2608 print_generic_expr (out, var, 0);
2610 fprintf (out, "\n");
2613 /* Set the value number of FROM to TO, return true if it has changed
2614 as a result. */
2616 static inline bool
2617 set_ssa_val_to (tree from, tree to)
2619 tree currval = SSA_VAL (from);
2621 if (from != to)
2623 if (currval == from)
2625 if (dump_file && (dump_flags & TDF_DETAILS))
2627 fprintf (dump_file, "Not changing value number of ");
2628 print_generic_expr (dump_file, from, 0);
2629 fprintf (dump_file, " from VARYING to ");
2630 print_generic_expr (dump_file, to, 0);
2631 fprintf (dump_file, "\n");
2633 return false;
2635 else if (TREE_CODE (to) == SSA_NAME
2636 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2637 to = from;
2640 /* The only things we allow as value numbers are VN_TOP, SSA names
2641 and invariants. So assert that here. */
2642 gcc_assert (to != NULL_TREE
2643 && (to == VN_TOP
2644 || TREE_CODE (to) == SSA_NAME
2645 || is_gimple_min_invariant (to)));
2647 if (dump_file && (dump_flags & TDF_DETAILS))
2649 fprintf (dump_file, "Setting value number of ");
2650 print_generic_expr (dump_file, from, 0);
2651 fprintf (dump_file, " to ");
2652 print_generic_expr (dump_file, to, 0);
2655 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2657 VN_INFO (from)->valnum = to;
2658 if (dump_file && (dump_flags & TDF_DETAILS))
2659 fprintf (dump_file, " (changed)\n");
2660 return true;
2662 if (dump_file && (dump_flags & TDF_DETAILS))
2663 fprintf (dump_file, "\n");
2664 return false;
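/* Note the lattice only ever moves from VN_TOP through constants
   and SSA names towards VARYING, where VARYING is represented by
   an SSA name being its own value number; that is the encoding
   defs_to_varying below relies on.  */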
2667 /* Mark as processed all the definitions in the defining stmt of USE, or
2668 the USE itself. */
2670 static void
2671 mark_use_processed (tree use)
2673 ssa_op_iter iter;
2674 def_operand_p defp;
2675 gimple stmt = SSA_NAME_DEF_STMT (use);
2677 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2679 VN_INFO (use)->use_processed = true;
2680 return;
2683 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2685 tree def = DEF_FROM_PTR (defp);
2687 VN_INFO (def)->use_processed = true;
2691 /* Value number all definitions in STMT to themselves.
2692 Return true if a value number changed. */
2694 static bool
2695 defs_to_varying (gimple stmt)
2697 bool changed = false;
2698 ssa_op_iter iter;
2699 def_operand_p defp;
2701 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2703 tree def = DEF_FROM_PTR (defp);
2704 changed |= set_ssa_val_to (def, def);
2706 return changed;
2709 static bool expr_has_constants (tree expr);
2710 static tree valueize_expr (tree expr);
2712 /* Visit a copy between LHS and RHS, return true if the value number
2713 changed. */
2715 static bool
2716 visit_copy (tree lhs, tree rhs)
2718 /* The copy may have a more interesting constant-filled expression
2719 than we do (our RHS is known to be just an SSA name). */
2720 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2721 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2723 /* And finally valueize. */
2724 rhs = SSA_VAL (rhs);
2726 return set_ssa_val_to (lhs, rhs);
2729 /* Visit a nary operator RHS, value number it, and return true if the
2730 value number of LHS has changed as a result. */
2732 static bool
2733 visit_nary_op (tree lhs, gimple stmt)
2735 bool changed = false;
2736 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2738 if (result)
2739 changed = set_ssa_val_to (lhs, result);
2740 else
2742 changed = set_ssa_val_to (lhs, lhs);
2743 vn_nary_op_insert_stmt (stmt, lhs);
2746 return changed;
2749 /* Visit a call STMT storing into LHS. Return true if the value number
2750 of the LHS has changed as a result. */
2752 static bool
2753 visit_reference_op_call (tree lhs, gimple stmt)
2755 bool changed = false;
2756 struct vn_reference_s vr1;
2757 vn_reference_t vnresult = NULL;
2758 tree vuse = gimple_vuse (stmt);
2759 tree vdef = gimple_vdef (stmt);
2761 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2762 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2763 lhs = NULL_TREE;
2765 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2766 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2767 vr1.type = gimple_expr_type (stmt);
2768 vr1.set = 0;
2769 vr1.hashcode = vn_reference_compute_hash (&vr1);
2770 vn_reference_lookup_1 (&vr1, &vnresult);
2772 if (vnresult)
2774 if (vnresult->result_vdef)
2775 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2777 if (!vnresult->result && lhs)
2778 vnresult->result = lhs;
2780 if (vnresult->result && lhs)
2782 changed |= set_ssa_val_to (lhs, vnresult->result);
2784 if (VN_INFO (vnresult->result)->has_constants)
2785 VN_INFO (lhs)->has_constants = true;
2788 else
2790 vn_reference_s **slot;
2791 vn_reference_t vr2;
2792 if (vdef)
2793 changed |= set_ssa_val_to (vdef, vdef);
2794 if (lhs)
2795 changed |= set_ssa_val_to (lhs, lhs);
2796 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2797 vr2->vuse = vr1.vuse;
2798 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2799 vr2->type = vr1.type;
2800 vr2->set = vr1.set;
2801 vr2->hashcode = vr1.hashcode;
2802 vr2->result = lhs;
2803 vr2->result_vdef = vdef;
2804 slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
2805 INSERT);
2806 if (*slot)
2807 free_reference (*slot);
2808 *slot = vr2;
2811 return changed;
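/* For example (a sketch; pure_fn stands for any pure function),

     # VUSE <.MEM_6>
     x_1 = pure_fn (a_2);
     # VUSE <.MEM_6>
     y_3 = pure_fn (a_2);

   hash to the same reference, so the lookup for the second call
   succeeds and y_3 is value numbered to x_1.  */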
2814 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2815 and return true if the value number of the LHS has changed as a result. */
2817 static bool
2818 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2820 bool changed = false;
2821 tree last_vuse;
2822 tree result;
2824 last_vuse = gimple_vuse (stmt);
2825 last_vuse_ptr = &last_vuse;
2826 result = vn_reference_lookup (op, gimple_vuse (stmt),
2827 default_vn_walk_kind, NULL);
2828 last_vuse_ptr = NULL;
2830 /* If we have a VCE, try looking up its operand as it might be stored in
2831 a different type. */
2832 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2833 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2834 default_vn_walk_kind, NULL);
2836 /* We handle type-punning through unions by value-numbering based
2837 on offset and size of the access. Be prepared to handle a
2838 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
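/* For example (a sketch), with

     union { int i; float f; } u;
     u.i = 0;
     tem_1 = u.f;

   the lookup finds the store to u.i at the same offset and size,
   and the resulting int value is wrapped in
   VIEW_CONVERT_EXPR <float> below (which fold may further reduce
   to a constant).  */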
2839 if (result
2840 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2842 /* We will be setting the value number of lhs to the value number
2843 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2844 So first simplify and lookup this expression to see if it
2845 is already available. */
2846 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2847 if ((CONVERT_EXPR_P (val)
2848 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2849 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2851 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2852 if ((CONVERT_EXPR_P (tem)
2853 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2854 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2855 TREE_TYPE (val), tem)))
2856 val = tem;
2858 result = val;
2859 if (!is_gimple_min_invariant (val)
2860 && TREE_CODE (val) != SSA_NAME)
2861 result = vn_nary_op_lookup (val, NULL);
2862 /* If the expression is not yet available, value-number lhs to
2863 a new SSA_NAME we create. */
2864 if (!result)
2866 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2867 "vntemp");
2868 /* Initialize value-number information properly. */
2869 VN_INFO_GET (result)->valnum = result;
2870 VN_INFO (result)->value_id = get_next_value_id ();
2871 VN_INFO (result)->expr = val;
2872 VN_INFO (result)->has_constants = expr_has_constants (val);
2873 VN_INFO (result)->needs_insertion = true;
2874 /* As all "inserted" statements are singleton SCCs, insert
2875 into the valid table. This is strictly needed to
2876 avoid re-generating new value SSA_NAMEs for the same
2877 expression during SCC iteration over and over (the
2878 optimistic table gets cleared after each iteration).
2879 We do not need to insert into the optimistic table, as
2880 lookups there will fall back to the valid table. */
2881 if (current_info == optimistic_info)
2883 current_info = valid_info;
2884 vn_nary_op_insert (val, result);
2885 current_info = optimistic_info;
2887 else
2888 vn_nary_op_insert (val, result);
2889 if (dump_file && (dump_flags & TDF_DETAILS))
2891 fprintf (dump_file, "Inserting name ");
2892 print_generic_expr (dump_file, result, 0);
2893 fprintf (dump_file, " for expression ");
2894 print_generic_expr (dump_file, val, 0);
2895 fprintf (dump_file, "\n");
2900 if (result)
2902 changed = set_ssa_val_to (lhs, result);
2903 if (TREE_CODE (result) == SSA_NAME
2904 && VN_INFO (result)->has_constants)
2906 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2907 VN_INFO (lhs)->has_constants = true;
2910 else
2912 changed = set_ssa_val_to (lhs, lhs);
2913 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2916 return changed;
2920 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2921 and return true if the value number of the LHS has changed as a result. */
2923 static bool
2924 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2926 bool changed = false;
2927 vn_reference_t vnresult = NULL;
2928 tree result, assign;
2929 bool resultsame = false;
2930 tree vuse = gimple_vuse (stmt);
2931 tree vdef = gimple_vdef (stmt);
2933 /* First we want to look up using the *vuses* from the store and see
2934 if the last store to this location with the same address
2935 had the same value.
2937 The vuses represent the memory state before the store. If the
2938 memory state, address, and value of the store are the same as the
2939 last store to this location, then this store will produce the
2940 same memory state as that store.
2942 In this case the vdef versions for this store are value numbered to those
2943 vuse versions, since they represent the same memory state after
2944 this store.
2946 Otherwise, the vdefs for the store are used when inserting into
2947 the table, since the store generates a new memory state. */
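/* For example (a sketch of the GIMPLE):

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     # .MEM_5 = VDEF <.MEM_3>
     a = x_1;

   for the second store the lookup below succeeds with the same
   value, so .MEM_5 is value numbered to .MEM_3 and the store can
   later be removed as redundant.  */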
2949 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2951 if (result)
2953 if (TREE_CODE (result) == SSA_NAME)
2954 result = SSA_VAL (result);
2955 if (TREE_CODE (op) == SSA_NAME)
2956 op = SSA_VAL (op);
2957 resultsame = expressions_equal_p (result, op);
2960 if (!result || !resultsame)
2962 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2963 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2964 if (vnresult)
2966 VN_INFO (vdef)->use_processed = true;
2967 return set_ssa_val_to (vdef, vnresult->result_vdef);
2971 if (!result || !resultsame)
2973 if (dump_file && (dump_flags & TDF_DETAILS))
2975 fprintf (dump_file, "No store match\n");
2976 fprintf (dump_file, "Value numbering store ");
2977 print_generic_expr (dump_file, lhs, 0);
2978 fprintf (dump_file, " to ");
2979 print_generic_expr (dump_file, op, 0);
2980 fprintf (dump_file, "\n");
2982 /* Have to set value numbers before insert, since insert is
2983 going to valueize the references in-place. */
2984 if (vdef)
2986 changed |= set_ssa_val_to (vdef, vdef);
2989 /* Do not insert structure copies into the tables. */
2990 if (is_gimple_min_invariant (op)
2991 || is_gimple_reg (op))
2992 vn_reference_insert (lhs, op, vdef, NULL);
2994 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2995 vn_reference_insert (assign, lhs, vuse, vdef);
2997 else
2999 /* We had a match, so value number the vdef to have the value
3000 number of the vuse it came from. */
3002 if (dump_file && (dump_flags & TDF_DETAILS))
3003 fprintf (dump_file, "Store matched earlier value, "
3004 "value numbering store vdefs to matching vuses.\n");
3006 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3009 return changed;
3012 /* Visit and value number PHI, return true if the value number
3013 changed. */
3015 static bool
3016 visit_phi (gimple phi)
3018 bool changed = false;
3019 tree result;
3020 tree sameval = VN_TOP;
3021 bool allsame = true;
3022 unsigned i;
3024 /* TODO: We could check for this in init_sccvn, and replace this
3025 with a gcc_assert. */
3026 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3027 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3029 /* See if all non-TOP arguments have the same value. TOP is
3030 equivalent to everything, so we can ignore it. */
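/* E.g. i_1 = PHI <0(2), VN_TOP(4)> optimistically value numbers
   i_1 to 0; if the back edge argument later turns out to differ,
   the SCC iteration in process_scc revisits this PHI.  */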
3031 for (i = 0; i < gimple_phi_num_args (phi); i++)
3033 tree def = PHI_ARG_DEF (phi, i);
3035 if (TREE_CODE (def) == SSA_NAME)
3036 def = SSA_VAL (def);
3037 if (def == VN_TOP)
3038 continue;
3039 if (sameval == VN_TOP)
3041 sameval = def;
3043 else
3045 if (!expressions_equal_p (def, sameval))
3047 allsame = false;
3048 break;
3053 /* If all arguments value numbered to the same value, the phi node
3054 has that value. */
3055 if (allsame)
3057 if (is_gimple_min_invariant (sameval))
3059 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3060 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3062 else
3064 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3065 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3068 if (TREE_CODE (sameval) == SSA_NAME)
3069 return visit_copy (PHI_RESULT (phi), sameval);
3071 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3074 /* Otherwise, see if it is equivalent to a phi node in this block. */
3075 result = vn_phi_lookup (phi);
3076 if (result)
3078 if (TREE_CODE (result) == SSA_NAME)
3079 changed = visit_copy (PHI_RESULT (phi), result);
3080 else
3081 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3083 else
3085 vn_phi_insert (phi, PHI_RESULT (phi));
3086 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3087 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3088 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3091 return changed;
3094 /* Return true if EXPR contains constants. */
3096 static bool
3097 expr_has_constants (tree expr)
3099 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3101 case tcc_unary:
3102 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3104 case tcc_binary:
3105 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3106 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3107 /* Constants inside reference ops are rarely interesting, but
3108 it can take a lot of looking to find them. */
3109 case tcc_reference:
3110 case tcc_declaration:
3111 return false;
3112 default:
3113 return is_gimple_min_invariant (expr);
3115 return false;
3118 /* Return true if STMT contains constants. */
3120 static bool
3121 stmt_has_constants (gimple stmt)
3123 tree tem;
3125 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3126 return false;
3128 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3130 case GIMPLE_TERNARY_RHS:
3131 tem = gimple_assign_rhs3 (stmt);
3132 if (TREE_CODE (tem) == SSA_NAME)
3133 tem = SSA_VAL (tem);
3134 if (is_gimple_min_invariant (tem))
3135 return true;
3136 /* Fallthru. */
3138 case GIMPLE_BINARY_RHS:
3139 tem = gimple_assign_rhs2 (stmt);
3140 if (TREE_CODE (tem) == SSA_NAME)
3141 tem = SSA_VAL (tem);
3142 if (is_gimple_min_invariant (tem))
3143 return true;
3144 /* Fallthru. */
3146 case GIMPLE_SINGLE_RHS:
3147 /* Constants inside reference ops are rarely interesting, but
3148 it can take a lot of looking to find them. */
3149 case GIMPLE_UNARY_RHS:
3150 tem = gimple_assign_rhs1 (stmt);
3151 if (TREE_CODE (tem) == SSA_NAME)
3152 tem = SSA_VAL (tem);
3153 return is_gimple_min_invariant (tem);
3155 default:
3156 gcc_unreachable ();
3158 return false;
3161 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
3162 result.
3163 This is performed in place. */
3165 static tree
3166 valueize_expr (tree expr)
3168 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3170 case tcc_binary:
3171 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3172 /* Fallthru. */
3173 case tcc_unary:
3174 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3175 break;
3176 default:;
3178 return expr;
3181 /* Simplify the binary expression RHS, and return the result if
3182 simplified. */
3184 static tree
3185 simplify_binary_expression (gimple stmt)
3187 tree result = NULL_TREE;
3188 tree op0 = gimple_assign_rhs1 (stmt);
3189 tree op1 = gimple_assign_rhs2 (stmt);
3190 enum tree_code code = gimple_assign_rhs_code (stmt);
3192 /* This will not catch every single case we could combine, but will
3193 catch those with constants. The goal here is to simultaneously
3194 combine constants between expressions, but avoid infinite
3195 expansion of expressions during simplification. */
3196 if (TREE_CODE (op0) == SSA_NAME)
3198 if (VN_INFO (op0)->has_constants
3199 || TREE_CODE_CLASS (code) == tcc_comparison
3200 || code == COMPLEX_EXPR)
3201 op0 = valueize_expr (vn_get_expr_for (op0));
3202 else
3203 op0 = vn_valueize (op0);
3206 if (TREE_CODE (op1) == SSA_NAME)
3208 if (VN_INFO (op1)->has_constants
3209 || code == COMPLEX_EXPR)
3210 op1 = valueize_expr (vn_get_expr_for (op1));
3211 else
3212 op1 = vn_valueize (op1);
3215 /* Pointer plus constant can be represented as invariant address.
3216 Do so to allow further propagation; see also tree forwprop. */
3217 if (code == POINTER_PLUS_EXPR
3218 && host_integerp (op1, 1)
3219 && TREE_CODE (op0) == ADDR_EXPR
3220 && is_gimple_min_invariant (op0))
3221 return build_invariant_address (TREE_TYPE (op0),
3222 TREE_OPERAND (op0, 0),
3223 TREE_INT_CST_LOW (op1));
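/* E.g. with OP0 == &a and OP1 == 4 this returns roughly
   &MEM[(void *)&a + 4B], an invariant address that can be
   propagated into dereferences.  */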
3225 /* Avoid folding if nothing changed. */
3226 if (op0 == gimple_assign_rhs1 (stmt)
3227 && op1 == gimple_assign_rhs2 (stmt))
3228 return NULL_TREE;
3230 fold_defer_overflow_warnings ();
3232 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3233 if (result)
3234 STRIP_USELESS_TYPE_CONVERSION (result);
3236 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3237 stmt, 0);
3239 /* Make sure result is not a complex expression consisting
3240 of operators of operators (i.e. (a + b) + (a + c)).
3241 Otherwise, we will end up with unbounded expressions if
3242 fold does anything at all. */
3243 if (result && valid_gimple_rhs_p (result))
3244 return result;
3246 return NULL_TREE;
3249 /* Simplify the unary expression RHS, and return the result if
3250 simplified. */
3252 static tree
3253 simplify_unary_expression (gimple stmt)
3255 tree result = NULL_TREE;
3256 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3257 enum tree_code code = gimple_assign_rhs_code (stmt);
3259 /* We handle some tcc_reference codes here that are all
3260 GIMPLE_ASSIGN_SINGLE codes. */
3261 if (code == REALPART_EXPR
3262 || code == IMAGPART_EXPR
3263 || code == VIEW_CONVERT_EXPR
3264 || code == BIT_FIELD_REF)
3265 op0 = TREE_OPERAND (op0, 0);
3267 if (TREE_CODE (op0) != SSA_NAME)
3268 return NULL_TREE;
3270 orig_op0 = op0;
3271 if (VN_INFO (op0)->has_constants)
3272 op0 = valueize_expr (vn_get_expr_for (op0));
3273 else if (CONVERT_EXPR_CODE_P (code)
3274 || code == REALPART_EXPR
3275 || code == IMAGPART_EXPR
3276 || code == VIEW_CONVERT_EXPR
3277 || code == BIT_FIELD_REF)
3279 /* We want to do tree-combining on conversion-like expressions.
3280 Make sure we feed only SSA_NAMEs or constants to fold though. */
3281 tree tem = valueize_expr (vn_get_expr_for (op0));
3282 if (UNARY_CLASS_P (tem)
3283 || BINARY_CLASS_P (tem)
3284 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3285 || TREE_CODE (tem) == SSA_NAME
3286 || TREE_CODE (tem) == CONSTRUCTOR
3287 || is_gimple_min_invariant (tem))
3288 op0 = tem;
3291 /* Avoid folding if nothing changed, but remember the expression. */
3292 if (op0 == orig_op0)
3293 return NULL_TREE;
3295 if (code == BIT_FIELD_REF)
3297 tree rhs = gimple_assign_rhs1 (stmt);
3298 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3299 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3301 else
3302 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3303 if (result)
3305 STRIP_USELESS_TYPE_CONVERSION (result);
3306 if (valid_gimple_rhs_p (result))
3307 return result;
3310 return NULL_TREE;
3313 /* Try to simplify RHS using equivalences and constant folding. */
3315 static tree
3316 try_to_simplify (gimple stmt)
3318 enum tree_code code = gimple_assign_rhs_code (stmt);
3319 tree tem;
3321 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3322 in this case; there is no point in doing extra work. */
3323 if (code == SSA_NAME)
3324 return NULL_TREE;
3326 /* First try constant folding based on our current lattice. */
3327 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3328 if (tem
3329 && (TREE_CODE (tem) == SSA_NAME
3330 || is_gimple_min_invariant (tem)))
3331 return tem;
3333 /* If that didn't work, try combining multiple statements. */
3334 switch (TREE_CODE_CLASS (code))
3336 case tcc_reference:
3337 /* Fallthrough for some unary codes that can operate on registers. */
3338 if (!(code == REALPART_EXPR
3339 || code == IMAGPART_EXPR
3340 || code == VIEW_CONVERT_EXPR
3341 || code == BIT_FIELD_REF))
3342 break;
3343 /* We could do a little more with unary ops, if they expand
3344 into binary ops, but it's debatable whether it is worth it. */
3345 case tcc_unary:
3346 return simplify_unary_expression (stmt);
3348 case tcc_comparison:
3349 case tcc_binary:
3350 return simplify_binary_expression (stmt);
3352 default:
3353 break;
3356 return NULL_TREE;
3359 /* Visit and value number USE, return true if the value number
3360 changed. */
3362 static bool
3363 visit_use (tree use)
3365 bool changed = false;
3366 gimple stmt = SSA_NAME_DEF_STMT (use);
3368 mark_use_processed (use);
3370 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3371 if (dump_file && (dump_flags & TDF_DETAILS)
3372 && !SSA_NAME_IS_DEFAULT_DEF (use))
3374 fprintf (dump_file, "Value numbering ");
3375 print_generic_expr (dump_file, use, 0);
3376 fprintf (dump_file, " stmt = ");
3377 print_gimple_stmt (dump_file, stmt, 0, 0);
3380 /* Handle uninitialized uses. */
3381 if (SSA_NAME_IS_DEFAULT_DEF (use))
3382 changed = set_ssa_val_to (use, use);
3383 else
3385 if (gimple_code (stmt) == GIMPLE_PHI)
3386 changed = visit_phi (stmt);
3387 else if (gimple_has_volatile_ops (stmt))
3388 changed = defs_to_varying (stmt);
3389 else if (is_gimple_assign (stmt))
3391 enum tree_code code = gimple_assign_rhs_code (stmt);
3392 tree lhs = gimple_assign_lhs (stmt);
3393 tree rhs1 = gimple_assign_rhs1 (stmt);
3394 tree simplified;
3396 /* Shortcut for copies. Simplifying copies is pointless,
3397 since we copy the expression and value they represent. */
3398 if (code == SSA_NAME
3399 && TREE_CODE (lhs) == SSA_NAME)
3401 changed = visit_copy (lhs, rhs1);
3402 goto done;
3404 simplified = try_to_simplify (stmt);
3405 if (simplified)
3407 if (dump_file && (dump_flags & TDF_DETAILS))
3409 fprintf (dump_file, "RHS ");
3410 print_gimple_expr (dump_file, stmt, 0, 0);
3411 fprintf (dump_file, " simplified to ");
3412 print_generic_expr (dump_file, simplified, 0);
3413 if (TREE_CODE (lhs) == SSA_NAME)
3414 fprintf (dump_file, " has constants %d\n",
3415 expr_has_constants (simplified));
3416 else
3417 fprintf (dump_file, "\n");
3420 /* Setting value numbers to constants will occasionally
3421 screw up phi congruence because constants are not
3422 uniquely associated with a single ssa name that can be
3423 looked up. */
3424 if (simplified
3425 && is_gimple_min_invariant (simplified)
3426 && TREE_CODE (lhs) == SSA_NAME)
3428 VN_INFO (lhs)->expr = simplified;
3429 VN_INFO (lhs)->has_constants = true;
3430 changed = set_ssa_val_to (lhs, simplified);
3431 goto done;
3433 else if (simplified
3434 && TREE_CODE (simplified) == SSA_NAME
3435 && TREE_CODE (lhs) == SSA_NAME)
3437 changed = visit_copy (lhs, simplified);
3438 goto done;
3440 else if (simplified)
3442 if (TREE_CODE (lhs) == SSA_NAME)
3444 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3445 /* We have to unshare the expression or else
3446 valueizing may change the IL stream. */
3447 VN_INFO (lhs)->expr = unshare_expr (simplified);
3450 else if (stmt_has_constants (stmt)
3451 && TREE_CODE (lhs) == SSA_NAME)
3452 VN_INFO (lhs)->has_constants = true;
3453 else if (TREE_CODE (lhs) == SSA_NAME)
3455 /* We reset expr and constantness here because we may
3456 have been value numbering optimistically, and
3457 iterating. They may become non-constant in this case,
3458 even if they were optimistically constant. */
3460 VN_INFO (lhs)->has_constants = false;
3461 VN_INFO (lhs)->expr = NULL_TREE;
3464 if ((TREE_CODE (lhs) == SSA_NAME
3465 /* We can substitute SSA_NAMEs that are live over
3466 abnormal edges with their constant value. */
3467 && !(gimple_assign_copy_p (stmt)
3468 && is_gimple_min_invariant (rhs1))
3469 && !(simplified
3470 && is_gimple_min_invariant (simplified))
3471 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3472 /* Stores or copies from SSA_NAMEs that are live over
3473 abnormal edges are a problem. */
3474 || (code == SSA_NAME
3475 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3476 changed = defs_to_varying (stmt);
3477 else if (REFERENCE_CLASS_P (lhs)
3478 || DECL_P (lhs))
3479 changed = visit_reference_op_store (lhs, rhs1, stmt);
3480 else if (TREE_CODE (lhs) == SSA_NAME)
3482 if ((gimple_assign_copy_p (stmt)
3483 && is_gimple_min_invariant (rhs1))
3484 || (simplified
3485 && is_gimple_min_invariant (simplified)))
3487 VN_INFO (lhs)->has_constants = true;
3488 if (simplified)
3489 changed = set_ssa_val_to (lhs, simplified);
3490 else
3491 changed = set_ssa_val_to (lhs, rhs1);
3493 else
3495 /* First try to look up the simplified expression. */
3496 if (simplified)
3498 enum gimple_rhs_class rhs_class;
3501 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3502 if ((rhs_class == GIMPLE_UNARY_RHS
3503 || rhs_class == GIMPLE_BINARY_RHS
3504 || rhs_class == GIMPLE_TERNARY_RHS)
3505 && valid_gimple_rhs_p (simplified))
3507 tree result = vn_nary_op_lookup (simplified, NULL);
3508 if (result)
3510 changed = set_ssa_val_to (lhs, result);
3511 goto done;
3516 /* Otherwise visit the original statement. */
3517 switch (vn_get_stmt_kind (stmt))
3519 case VN_NARY:
3520 changed = visit_nary_op (lhs, stmt);
3521 break;
3522 case VN_REFERENCE:
3523 changed = visit_reference_op_load (lhs, rhs1, stmt);
3524 break;
3525 default:
3526 changed = defs_to_varying (stmt);
3527 break;
3531 else
3532 changed = defs_to_varying (stmt);
3534 else if (is_gimple_call (stmt))
3536 tree lhs = gimple_call_lhs (stmt);
3538 /* ??? We could try to simplify calls. */
3540 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3542 if (stmt_has_constants (stmt))
3543 VN_INFO (lhs)->has_constants = true;
3544 else
3546 /* We reset expr and constantness here because we may
3547 have been value numbering optimistically, and
3548 iterating. They may become non-constant in this case,
3549 even if they were optimistically constant. */
3550 VN_INFO (lhs)->has_constants = false;
3551 VN_INFO (lhs)->expr = NULL_TREE;
3554 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3556 changed = defs_to_varying (stmt);
3557 goto done;
3561 if (!gimple_call_internal_p (stmt)
3562 && (/* Calls to the same function with the same vuse
3563 and the same operands do not necessarily return the same
3564 value, unless they're pure or const. */
3565 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3566 /* If calls have a vdef, subsequent calls won't have
3567 the same incoming vuse. So, if two calls with a vdef have
3568 the same vuse, we know they're not subsequent.
3569 We can value number two non-subsequent calls to the same
3570 function with the same vuse and the same operands the same,
3571 because there is no code in the program that could compare
3572 the two values... */
3573 || (gimple_vdef (stmt)
3574 /* ... unless the call returns a pointer which does
3575 not alias with anything else. In which case the
3576 information that the values are distinct is encoded
3577 in the IL. */
3578 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3579 changed = visit_reference_op_call (lhs, stmt);
3580 else
3581 changed = defs_to_varying (stmt);
3583 else
3584 changed = defs_to_varying (stmt);
3586 done:
3587 return changed;
3590 /* Compare two operands by reverse postorder index. */
3592 static int
3593 compare_ops (const void *pa, const void *pb)
3595 const tree opa = *((const tree *)pa);
3596 const tree opb = *((const tree *)pb);
3597 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3598 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3599 basic_block bba;
3600 basic_block bbb;
3602 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3603 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3604 else if (gimple_nop_p (opstmta))
3605 return -1;
3606 else if (gimple_nop_p (opstmtb))
3607 return 1;
3609 bba = gimple_bb (opstmta);
3610 bbb = gimple_bb (opstmtb);
3612 if (!bba && !bbb)
3613 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3614 else if (!bba)
3615 return -1;
3616 else if (!bbb)
3617 return 1;
3619 if (bba == bbb)
3621 if (gimple_code (opstmta) == GIMPLE_PHI
3622 && gimple_code (opstmtb) == GIMPLE_PHI)
3623 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3624 else if (gimple_code (opstmta) == GIMPLE_PHI)
3625 return -1;
3626 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3627 return 1;
3628 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3629 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3630 else
3631 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3633 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3636 /* Sort an array containing members of a strongly connected component
3637 SCC so that the members are ordered by RPO number.
3638 This means that when the sort is complete, iterating through the
3639 array will give you the members in RPO order. */
3641 static void
3642 sort_scc (vec<tree> scc)
3644 scc.qsort (compare_ops);
3647 /* Insert the no longer used nary ONARY into the hash INFO. */
3649 static void
3650 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3652 size_t size = sizeof_vn_nary_op (onary->length);
3653 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3654 &info->nary_obstack);
3655 memcpy (nary, onary, size);
3656 vn_nary_op_insert_into (nary, info->nary, false);
3659 /* Insert the no longer used phi OPHI into the hash INFO. */
3661 static void
3662 copy_phi (vn_phi_t ophi, vn_tables_t info)
3664 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3665 vn_phi_s **slot;
3666 memcpy (phi, ophi, sizeof (*phi));
3667 ophi->phiargs.create (0);
3668 slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
3669 gcc_assert (!*slot);
3670 *slot = phi;
3673 /* Insert the no longer used reference OREF into the hash INFO. */
3675 static void
3676 copy_reference (vn_reference_t oref, vn_tables_t info)
3678 vn_reference_t ref;
3679 vn_reference_s **slot;
3680 ref = (vn_reference_t) pool_alloc (info->references_pool);
3681 memcpy (ref, oref, sizeof (*ref));
3682 oref->operands.create (0);
3683 slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
3684 if (*slot)
3685 free_reference (*slot);
3686 *slot = ref;
3689 /* Process a strongly connected component in the SSA graph. */
3691 static void
3692 process_scc (vec<tree> scc)
3694 tree var;
3695 unsigned int i;
3696 unsigned int iterations = 0;
3697 bool changed = true;
3698 vn_nary_op_iterator_type hin;
3699 vn_phi_iterator_type hip;
3700 vn_reference_iterator_type hir;
3701 vn_nary_op_t nary;
3702 vn_phi_t phi;
3703 vn_reference_t ref;
3705 /* If the SCC has a single member, just visit it. */
3706 if (scc.length () == 1)
3708 tree use = scc[0];
3709 if (VN_INFO (use)->use_processed)
3710 return;
3711 /* We need to make sure it doesn't form a cycle itself, which can
3712 happen for self-referential PHI nodes. In that case we would
3713 end up inserting an expression with VN_TOP operands into the
3714 valid table which makes us derive bogus equivalences later.
3715 The cheapest way to check this is to assume it for all PHI nodes. */
3716 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3717 /* Fallthru to iteration. */ ;
3718 else
3720 visit_use (use);
3721 return;
3725 /* Iterate over the SCC with the optimistic table until it stops
3726 changing. */
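/* For example (a sketch), for the loop SCC

     i_1 = PHI <0(2), i_3(4)>
     i_3 = i_1 + 0;

   iteration 1 optimistically treats the back edge argument as
   VN_TOP and computes i_1 == i_3 == 0; iteration 2 recomputes
   with those values and nothing changes, so the optimistic
   result is confirmed.  */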
3727 current_info = optimistic_info;
3728 while (changed)
3730 changed = false;
3731 iterations++;
3732 if (dump_file && (dump_flags & TDF_DETAILS))
3733 fprintf (dump_file, "Starting iteration %d\n", iterations);
3734 /* As we are value-numbering optimistically we have to
3735 clear the expression tables and the simplified expressions
3736 in each iteration until we converge. */
3737 optimistic_info->nary.empty ();
3738 optimistic_info->phis.empty ();
3739 optimistic_info->references.empty ();
3740 obstack_free (&optimistic_info->nary_obstack, NULL);
3741 gcc_obstack_init (&optimistic_info->nary_obstack);
3742 empty_alloc_pool (optimistic_info->phis_pool);
3743 empty_alloc_pool (optimistic_info->references_pool);
3744 FOR_EACH_VEC_ELT (scc, i, var)
3745 VN_INFO (var)->expr = NULL_TREE;
3746 FOR_EACH_VEC_ELT (scc, i, var)
3747 changed |= visit_use (var);
3750 statistics_histogram_event (cfun, "SCC iterations", iterations);
3752 /* Finally, copy the contents of the no longer used optimistic
3753 table to the valid table. */
3754 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
3755 copy_nary (nary, valid_info);
3756 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
3757 copy_phi (phi, valid_info);
3758 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
3759 ref, vn_reference_t, hir)
3760 copy_reference (ref, valid_info);
3762 current_info = valid_info;
3766 /* Pop the components of the found SCC for NAME off the SCC stack
3767 and process them. Returns true if all went well, false if
3768 we run into resource limits. */
3770 static bool
3771 extract_and_process_scc_for_name (tree name)
3773 vec<tree> scc = vNULL;
3774 tree x;
3776 /* Found an SCC, pop the components off the SCC stack and
3777 process them. */
3780 x = sccstack.pop ();
3782 VN_INFO (x)->on_sccstack = false;
3783 scc.safe_push (x);
3784 } while (x != name);
3786 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3787 if (scc.length ()
3788 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3790 if (dump_file)
3791 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3792 "SCC size %u exceeding %u\n", scc.length (),
3793 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3795 scc.release ();
3796 return false;
3799 if (scc.length () > 1)
3800 sort_scc (scc);
3802 if (dump_file && (dump_flags & TDF_DETAILS))
3803 print_scc (dump_file, scc);
3805 process_scc (scc);
3807 scc.release ();
3809 return true;
3812 /* Depth first search on NAME to discover and process SCCs in the SSA
3813 graph.
3814 Execution of this algorithm relies on the fact that the SCCs are
3815 popped off the stack in topological order.
3816 Returns true if successful, false if we stopped processing SCCs due
3817 to resource constraints. */
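/* A sketch of the recursive algorithm the iterative code below
   implements (Tarjan's SCC algorithm over SSA use-def edges, with
   NAMEVEC and ITERVEC serving as the explicit recursion stack):

     dfs (name):
       name->dfsnum = name->low = next_dfs_num++;
       push NAME on the SCC stack;
       for each use USE reached from NAME's defining statement:
         if (!USE->visited)
           { dfs (USE); name->low = MIN (name->low, USE->low); }
         else if (USE->on_sccstack)
           name->low = MIN (name->low, USE->dfsnum);
       if (name->low == name->dfsnum)
         pop the SCC ending at NAME and process it.  */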
3819 static bool
3820 DFS (tree name)
3822 vec<ssa_op_iter> itervec = vNULL;
3823 vec<tree> namevec = vNULL;
3824 use_operand_p usep = NULL;
3825 gimple defstmt;
3826 tree use;
3827 ssa_op_iter iter;
3829 start_over:
3830 /* SCC info */
3831 VN_INFO (name)->dfsnum = next_dfs_num++;
3832 VN_INFO (name)->visited = true;
3833 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3835 sccstack.safe_push (name);
3836 VN_INFO (name)->on_sccstack = true;
3837 defstmt = SSA_NAME_DEF_STMT (name);
3839 /* Recursively DFS on our operands, looking for SCCs. */
3840 if (!gimple_nop_p (defstmt))
3842 /* Push a new iterator. */
3843 if (gimple_code (defstmt) == GIMPLE_PHI)
3844 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3845 else
3846 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3848 else
3849 clear_and_done_ssa_iter (&iter);
3851 while (1)
3853 /* If we are done processing uses of a name, go up the stack
3854 of iterators and process SCCs as we found them. */
3855 if (op_iter_done (&iter))
3857 /* See if we found an SCC. */
3858 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3859 if (!extract_and_process_scc_for_name (name))
3861 namevec.release ();
3862 itervec.release ();
3863 return false;
3866 /* Check if we are done. */
3867 if (namevec.is_empty ())
3869 namevec.release ();
3870 itervec.release ();
3871 return true;
3874 /* Restore the last use walker and continue walking there. */
3875 use = name;
3876 name = namevec.pop ();
3877 memcpy (&iter, &itervec.last (),
3878 sizeof (ssa_op_iter));
3879 itervec.pop ();
3880 goto continue_walking;
3883 use = USE_FROM_PTR (usep);
3885 /* Since we handle phi nodes, we will sometimes get
3886 invariants in the use expression. */
3887 if (TREE_CODE (use) == SSA_NAME)
3889 if (! (VN_INFO (use)->visited))
3891 /* Recurse by pushing the current use walking state on
3892 the stack and starting over. */
3893 itervec.safe_push (iter);
3894 namevec.safe_push (name);
3895 name = use;
3896 goto start_over;
3898 continue_walking:
3899 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3900 VN_INFO (use)->low);
3902 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3903 && VN_INFO (use)->on_sccstack)
3905 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3906 VN_INFO (name)->low);
3910 usep = op_iter_next_use (&iter);
3914 /* Allocate a value number table. */
3916 static void
3917 allocate_vn_table (vn_tables_t table)
3919 table->phis.create (23);
3920 table->nary.create (23);
3921 table->references.create (23);
3923 gcc_obstack_init (&table->nary_obstack);
3924 table->phis_pool = create_alloc_pool ("VN phis",
3925 sizeof (struct vn_phi_s),
3926 30);
3927 table->references_pool = create_alloc_pool ("VN references",
3928 sizeof (struct vn_reference_s),
3929 30);
3932 /* Free a value number table. */
3934 static void
3935 free_vn_table (vn_tables_t table)
3937 table->phis.dispose ();
3938 table->nary.dispose ();
3939 table->references.dispose ();
3940 obstack_free (&table->nary_obstack, NULL);
3941 free_alloc_pool (table->phis_pool);
3942 free_alloc_pool (table->references_pool);
3945 static void
3946 init_scc_vn (void)
3948 size_t i;
3949 int j;
3950 int *rpo_numbers_temp;
3952 calculate_dominance_info (CDI_DOMINATORS);
3953 sccstack.create (0);
3954 constant_to_value_id.create (23);
3956 constant_value_ids = BITMAP_ALLOC (NULL);
3958 next_dfs_num = 1;
3959 next_value_id = 1;
3961 vn_ssa_aux_table.create (num_ssa_names + 1);
3962 /* VEC_alloc doesn't actually grow it to the right size; it just
3963 preallocates the space to do so. */
3964 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3965 gcc_obstack_init (&vn_ssa_aux_obstack);
3967 shared_lookup_phiargs.create (0);
3968 shared_lookup_references.create (0);
3969 rpo_numbers = XNEWVEC (int, last_basic_block);
3970 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3971 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3973 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3974 the i'th block in RPO order is bb. We want to map bb's to RPO
3975 numbers, so we need to rearrange this array. */
3976 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3977 rpo_numbers[rpo_numbers_temp[j]] = j;
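
  /* Example: if the three real blocks are visited in the order
     4, 2, 3, then rpo_numbers_temp is {4, 2, 3} and the loop above
     yields rpo_numbers[4] = 0, rpo_numbers[2] = 1, rpo_numbers[3] = 2,
     i.e. each block's position in the RPO sequence.  */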

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  constant_to_value_id.dispose ();
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
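
/* Concretely (illustrative names): a result of x_7 reuses x_7's value
   id, a constant result such as 42 maps to the single shared id for
   that constant via get_or_alloc_constant_value_id, and anything else,
   including a NULL result, receives a fresh id.  */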

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies how
   the alias oracle is used when walking memory references during
   the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
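
/* Usage sketch (illustrative; FRE and PRE are the in-tree clients):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;
     ... consult SSA_VAL and the vn_*_lookup routines ...
     free_scc_vn ();

   The walk kinds trade compile time for precision: VN_NOWALK disables
   alias-oracle walking, VN_WALK enables it, and, as we read the
   reference-lookup code, VN_WALKREWRITE additionally lets the walker
   rewrite the reference being looked up as it goes.  */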

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
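
/* A note on the comparison above (our reading of operand_equal_p):
   the pointer test catches shared trees, including two NULL_TREEs,
   and OEP_PURE_SAME additionally allows two calls to the same
   const/pure function with equal arguments to compare equal, which
   is what value numbering wants.  */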

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
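
/* For example (our reading of the helpers): an integer TRUNC_DIV_EXPR
   is reported as possibly trapping unless its second operand is a
   constant known to be nonzero, and even for a non-trapping opcode
   the operand scan at the end flags trees that tree_could_trap_p
   rejects, such as a possibly out-of-bounds ARRAY_REF.  */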

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;