PR ipa/64481
gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "inchash.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
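
/* An illustrative sketch of the scheme described above (the SSA names
   here are made up, not taken from a testcase): in the loop

       i_1 = PHI <0 (entry), i_2 (latch)>
       i_2 = i_1 + 0

   i_1 and i_2 form a cycle in the SSA graph.  The SCC walk
   optimistically assumes the PHI equals its sole non-VN_TOP argument 0,
   derives i_2 == 0 + 0 == 0 from that, and re-checks the PHI; the
   assumption still holds, so both names end up value-numbered to 0.
   A plain RPO hash-based scheme would instead re-iterate all blocks
   to reach the same fixpoint.  */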
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
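
/* A sketch of the above (names purely illustrative): if x_3 is
   value-numbered to y_2, and y_2 is defined by the tcc_binary
   statement y_2 = z_5 + 1, then vn_get_expr_for (x_3) builds,
   caches and returns the expression "vn_valueize (z_5) + 1" as
   the representative for x_3's value.  */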
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
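
/* Under the classification above, e.g. "x_1 = y_2 + z_3" is VN_NARY,
   "x_1 = a.f" is VN_REFERENCE, "x_1 = 42" is VN_CONSTANT and any
   GIMPLE_PHI is VN_PHI (illustrative statements only).  */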
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
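
/* To illustrate the offset-chunk comparison above (example made up):
   the operand strings for MEM[&a + 4] and for a.f, with field f at
   byte offset 4, both accumulate a constant offset of 4 before the
   innermost ADDR_EXPR, which the deref+ADDR_EXPR canonicalization
   replaces by the decl a on both sides, so the two references compare
   equal even though their operand sequences differ.  */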
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
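
/* For example (illustrative decl and field), the reference a.f for a
   VAR_DECL a whose field f sits at byte offset 4 is recorded as the
   operand string

       COMPONENT_REF <f>   (off = 4)
       MEM_REF <0>         (off = 0)
       ADDR_EXPR <&a>      (off = -1)

   i.e. the decl is canonicalized to MEM[&a] as described above.  */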
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
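
/* A concrete illustration (made-up names and offsets): for the operand
   pair { MEM_REF <8>, ADDR_EXPR <&a.b> } with .b at byte offset 4,
   get_addr_base_and_unit_offset returns base a and unit offset 4, so
   the pair is rewritten to { MEM_REF <12>, ADDR_EXPR <&a> }.  */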
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
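
/* E.g. (again made up): for MEM[ptr_1 + 16] where ptr_1 is defined by
   the POINTER_PLUS_EXPR ptr_1 = base_2 + 8, the operands become
   MEM[base_2 + 24], and the recursion above then looks through the
   definition of base_2 as well if it is another ADDR_EXPR or
   POINTER_PLUS_EXPR.  */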
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}
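
/* For instance, a load of a[1] from "static const int a[2] = { 1, 2 };"
   takes the fold_ctor_reference path above and simplifies to the
   constant 2, while a read from a decl whose ctor_for_folding is
   NULL_TREE yields a zero constant (illustrative source, not a
   testcase).  */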
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
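  /* (Illustration: given "memset (&a, 0, sizeof (a));" as DEF_STMT, a
     later scalar read of a.f that is fully covered by the memset range
     is resolved to the zero constant built below.)  */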
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	    == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
			{
			  if (TREE_CODE (TREE_TYPE (elt->value))
				!= VECTOR_TYPE)
			    val = elt->value;
			}
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
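  /* (Illustration: for a whole-struct copy "b = a;" as DEF_STMT, a
     read of b.f is rewritten below into a.f and the walk continues
     with the translated reference, so an earlier store to a.f can
     supply the value.)  */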
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      HOST_WIDE_INT extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1)
	{
	  if (lhs_ops[0].off == vr->operands[i].off)
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && vr->operands[i].off != -1)
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (extra_off != 0)
	{
	  if (rhs.length () < 2
	      || rhs[0].opcode != MEM_REF
	      || rhs[0].off == -1)
	    return (void *)-1;
	  rhs[0].off += extra_off;
	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
					build_int_cst (TREE_TYPE (rhs[0].op0),
						       extra_off));
	}

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
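  /* (Illustration: for "memcpy (&d, &s, n);" fully covering the
     access, a read from d at some offset is rewritten below into a
     MEM_REF based on the source pointer at the corresponding
     offset.)  */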
1982 else if (vn_walk_kind == VN_WALKREWRITE
1983 && is_gimple_reg_type (vr->type)
1984 /* ??? Handle BCOPY as well. */
1985 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1986 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1987 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1988 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1989 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1990 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1991 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1992 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1994 tree lhs, rhs;
1995 ao_ref r;
1996 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1997 vn_reference_op_s op;
1998 HOST_WIDE_INT at;
2001 /* Only handle non-variable, addressable refs. */
2002 if (ref->size != maxsize
2003 || offset % BITS_PER_UNIT != 0
2004 || ref->size % BITS_PER_UNIT != 0)
2005 return (void *)-1;
2007 /* Extract a pointer base and an offset for the destination. */
2008 lhs = gimple_call_arg (def_stmt, 0);
2009 lhs_offset = 0;
2010 if (TREE_CODE (lhs) == SSA_NAME)
2011 lhs = SSA_VAL (lhs);
2012 if (TREE_CODE (lhs) == ADDR_EXPR)
2014 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2015 &lhs_offset);
2016 if (!tem)
2017 return (void *)-1;
2018 if (TREE_CODE (tem) == MEM_REF
2019 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2021 lhs = TREE_OPERAND (tem, 0);
2022 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2024 else if (DECL_P (tem))
2025 lhs = build_fold_addr_expr (tem);
2026 else
2027 return (void *)-1;
2029 if (TREE_CODE (lhs) != SSA_NAME
2030 && TREE_CODE (lhs) != ADDR_EXPR)
2031 return (void *)-1;
2033 /* Extract a pointer base and an offset for the source. */
2034 rhs = gimple_call_arg (def_stmt, 1);
2035 rhs_offset = 0;
2036 if (TREE_CODE (rhs) == SSA_NAME)
2037 rhs = SSA_VAL (rhs);
2038 if (TREE_CODE (rhs) == ADDR_EXPR)
2040 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2041 &rhs_offset);
2042 if (!tem)
2043 return (void *)-1;
2044 if (TREE_CODE (tem) == MEM_REF
2045 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2047 rhs = TREE_OPERAND (tem, 0);
2048 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2050 else if (DECL_P (tem))
2051 rhs = build_fold_addr_expr (tem);
2052 else
2053 return (void *)-1;
2055 if (TREE_CODE (rhs) != SSA_NAME
2056 && TREE_CODE (rhs) != ADDR_EXPR)
2057 return (void *)-1;
2059 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2061	      /* The bases of the destination and the reference have to agree.  */
2062 if ((TREE_CODE (base) != MEM_REF
2063 && !DECL_P (base))
2064 || (TREE_CODE (base) == MEM_REF
2065 && (TREE_OPERAND (base, 0) != lhs
2066 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2067 || (DECL_P (base)
2068 && (TREE_CODE (lhs) != ADDR_EXPR
2069 || TREE_OPERAND (lhs, 0) != base)))
2070 return (void *)-1;
2072 /* And the access has to be contained within the memcpy destination. */
2073 at = offset / BITS_PER_UNIT;
2074 if (TREE_CODE (base) == MEM_REF)
2075 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2076 if (lhs_offset > at
2077 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2078 return (void *)-1;
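/* A worked instance of the check above (illustrative numbers): for
   memcpy (&a, &b, 8)  with lhs_offset 0 and a 4-byte read at byte
   offset 4 of a, at == 4 and maxsize / BITS_PER_UNIT == 4, so
   0 <= 4 and 0 + 8 >= 4 + 4 and the access is fully covered.  */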
2080 /* Make room for 2 operands in the new reference. */
2081 if (vr->operands.length () < 2)
2083 vec<vn_reference_op_s> old = vr->operands;
2084 vr->operands.safe_grow_cleared (2);
2085 if (old == shared_lookup_references
2086 && vr->operands != old)
2087 shared_lookup_references = vr->operands;
2089 else
2090 vr->operands.truncate (2);
2092 /* The looked-through reference is a simple MEM_REF. */
2093 memset (&op, 0, sizeof (op));
2094 op.type = vr->type;
2095 op.opcode = MEM_REF;
2096	      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2097 op.off = at - lhs_offset + rhs_offset;
2098 vr->operands[0] = op;
2099 op.type = TREE_TYPE (rhs);
2100 op.opcode = TREE_CODE (rhs);
2101 op.op0 = rhs;
2102 op.off = -1;
2103 vr->operands[1] = op;
2104 vr->hashcode = vn_reference_compute_hash (vr);
2106 /* Adjust *ref from the new operands. */
2107 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2108 return (void *)-1;
2109 /* This can happen with bitfields. */
2110 if (ref->size != r.size)
2111 return (void *)-1;
2112 *ref = r;
2114 /* Do not update last seen VUSE after translating. */
2115 last_vuse_ptr = NULL;
2117 /* Keep looking for the adjusted *REF / VR pair. */
2118 return NULL;
2121 /* Bail out and stop walking. */
2122 return (void *)-1;
2125	/* Lookup a reference operation by its parts in the current hash table.
2126 Returns the resulting value number if it exists in the hash table,
2127 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2128 vn_reference_t stored in the hashtable if something is found. */
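/* A minimal usage sketch (hypothetical caller, for illustration
   only):

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
					    &res, VN_WALK);

   If VAL is non-NULL it is the recorded value of an equivalent
   reference and RES points at its hash table entry.  */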
2130 tree
2131 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2132 vec<vn_reference_op_s> operands,
2133 vn_reference_t *vnresult, vn_lookup_kind kind)
2135 struct vn_reference_s vr1;
2136 vn_reference_t tmp;
2137 tree cst;
2139 if (!vnresult)
2140 vnresult = &tmp;
2141 *vnresult = NULL;
2143 vr1.vuse = vuse_ssa_val (vuse);
2144 shared_lookup_references.truncate (0);
2145 shared_lookup_references.safe_grow (operands.length ());
2146 memcpy (shared_lookup_references.address (),
2147 operands.address (),
2148 sizeof (vn_reference_op_s)
2149 * operands.length ());
2150 vr1.operands = operands = shared_lookup_references
2151 = valueize_refs (shared_lookup_references);
2152 vr1.type = type;
2153 vr1.set = set;
2154 vr1.hashcode = vn_reference_compute_hash (&vr1);
2155 if ((cst = fully_constant_vn_reference_p (&vr1)))
2156 return cst;
2158 vn_reference_lookup_1 (&vr1, vnresult);
2159 if (!*vnresult
2160 && kind != VN_NOWALK
2161 && vr1.vuse)
2163 ao_ref r;
2164 vn_walk_kind = kind;
2165 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2166 *vnresult =
2167 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2168 vn_reference_lookup_2,
2169 vn_reference_lookup_3,
2170 vuse_ssa_val, &vr1);
2171 gcc_checking_assert (vr1.operands == shared_lookup_references);
2174 if (*vnresult)
2175 return (*vnresult)->result;
2177 return NULL_TREE;
2180 /* Lookup OP in the current hash table, and return the resulting value
2181 number if it exists in the hash table. Return NULL_TREE if it does
2182 not exist in the hash table or if the result field of the structure
2183	   was NULL.  VNRESULT will be filled in with the vn_reference_t
2184 stored in the hashtable if one exists. */
2186 tree
2187 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2188 vn_reference_t *vnresult)
2190 vec<vn_reference_op_s> operands;
2191 struct vn_reference_s vr1;
2192 tree cst;
2193	  bool valueized_anything;
2195 if (vnresult)
2196 *vnresult = NULL;
2198 vr1.vuse = vuse_ssa_val (vuse);
2199 vr1.operands = operands
2200	    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2201 vr1.type = TREE_TYPE (op);
2202 vr1.set = get_alias_set (op);
2203 vr1.hashcode = vn_reference_compute_hash (&vr1);
2204 if ((cst = fully_constant_vn_reference_p (&vr1)))
2205 return cst;
2207 if (kind != VN_NOWALK
2208 && vr1.vuse)
2210 vn_reference_t wvnresult;
2211 ao_ref r;
2212 /* Make sure to use a valueized reference if we valueized anything.
2213 Otherwise preserve the full reference for advanced TBAA. */
2214	      if (!valueized_anything
2215 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2216 vr1.operands))
2217 ao_ref_init (&r, op);
2218 vn_walk_kind = kind;
2219 wvnresult =
2220 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2221 vn_reference_lookup_2,
2222 vn_reference_lookup_3,
2223 vuse_ssa_val, &vr1);
2224 gcc_checking_assert (vr1.operands == shared_lookup_references);
2225 if (wvnresult)
2227 if (vnresult)
2228 *vnresult = wvnresult;
2229 return wvnresult->result;
2232 return NULL_TREE;
2235 return vn_reference_lookup_1 (&vr1, vnresult);
2238 /* Lookup CALL in the current hash table and return the entry in
2239 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2241 void
2242 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2243 vn_reference_t vr)
2245 if (vnresult)
2246 *vnresult = NULL;
2248 tree vuse = gimple_vuse (call);
2250 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2251 vr->operands = valueize_shared_reference_ops_from_call (call);
2252 vr->type = gimple_expr_type (call);
2253 vr->set = 0;
2254 vr->hashcode = vn_reference_compute_hash (vr);
2255 vn_reference_lookup_1 (vr, vnresult);
2258 /* Insert OP into the current hash table with a value number of
2259 RESULT, and return the resulting reference structure we created. */
2261 static vn_reference_t
2262 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2264 vn_reference_s **slot;
2265 vn_reference_t vr1;
2266 bool tem;
2268 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2269 if (TREE_CODE (result) == SSA_NAME)
2270 vr1->value_id = VN_INFO (result)->value_id;
2271 else
2272 vr1->value_id = get_or_alloc_constant_value_id (result);
2273 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2274 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2275 vr1->type = TREE_TYPE (op);
2276 vr1->set = get_alias_set (op);
2277 vr1->hashcode = vn_reference_compute_hash (vr1);
2278 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2279 vr1->result_vdef = vdef;
2281 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2282 INSERT);
2284 /* Because we lookup stores using vuses, and value number failures
2285 using the vdefs (see visit_reference_op_store for how and why),
2286 it's possible that on failure we may try to insert an already
2287 inserted store. This is not wrong, there is no ssa name for a
2288 store that we could use as a differentiator anyway. Thus, unlike
2289 the other lookup functions, you cannot gcc_assert (!*slot)
2290 here. */
2292 /* But free the old slot in case of a collision. */
2293 if (*slot)
2294 free_reference (*slot);
2296 *slot = vr1;
2297 return vr1;
2300	/* Insert a reference by its pieces into the current hash table with
2301 a value number of RESULT. Return the resulting reference
2302 structure we created. */
2304 vn_reference_t
2305 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2306 vec<vn_reference_op_s> operands,
2307 tree result, unsigned int value_id)
2310 vn_reference_s **slot;
2311 vn_reference_t vr1;
2313 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2314 vr1->value_id = value_id;
2315 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2316 vr1->operands = valueize_refs (operands);
2317 vr1->type = type;
2318 vr1->set = set;
2319 vr1->hashcode = vn_reference_compute_hash (vr1);
2320 if (result && TREE_CODE (result) == SSA_NAME)
2321 result = SSA_VAL (result);
2322 vr1->result = result;
2324 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2325 INSERT);
2327 /* At this point we should have all the things inserted that we have
2328 seen before, and we should never try inserting something that
2329 already exists. */
2330 gcc_assert (!*slot);
2331 if (*slot)
2332 free_reference (*slot);
2334 *slot = vr1;
2335 return vr1;
2338 /* Compute and return the hash value for nary operation VBO1. */
2340 static hashval_t
2341 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2343 inchash::hash hstate;
2344 unsigned i;
2346 for (i = 0; i < vno1->length; ++i)
2347 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2348 vno1->op[i] = SSA_VAL (vno1->op[i]);
2350 if (vno1->length == 2
2351 && commutative_tree_code (vno1->opcode)
2352 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2354 tree temp = vno1->op[0];
2355 vno1->op[0] = vno1->op[1];
2356 vno1->op[1] = temp;
2359 hstate.add_int (vno1->opcode);
2360 for (i = 0; i < vno1->length; ++i)
2361 inchash::add_expr (vno1->op[i], hstate);
2363 return hstate.end ();
2366 /* Compare nary operations VNO1 and VNO2 and return true if they are
2367 equivalent. */
2369 bool
2370 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2372 unsigned i;
2374 if (vno1->hashcode != vno2->hashcode)
2375 return false;
2377 if (vno1->length != vno2->length)
2378 return false;
2380 if (vno1->opcode != vno2->opcode
2381 || !types_compatible_p (vno1->type, vno2->type))
2382 return false;
2384 for (i = 0; i < vno1->length; ++i)
2385 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2386 return false;
2388 return true;
2391 /* Initialize VNO from the pieces provided. */
2393 static void
2394 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2395 enum tree_code code, tree type, tree *ops)
2397 vno->opcode = code;
2398 vno->length = length;
2399 vno->type = type;
2400 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2403 /* Initialize VNO from OP. */
2405 static void
2406 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2408 unsigned i;
2410 vno->opcode = TREE_CODE (op);
2411 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2412 vno->type = TREE_TYPE (op);
2413 for (i = 0; i < vno->length; ++i)
2414 vno->op[i] = TREE_OPERAND (op, i);
2417 /* Return the number of operands for a vn_nary ops structure from STMT. */
2419 static unsigned int
2420 vn_nary_length_from_stmt (gimple stmt)
2422 switch (gimple_assign_rhs_code (stmt))
2424 case REALPART_EXPR:
2425 case IMAGPART_EXPR:
2426 case VIEW_CONVERT_EXPR:
2427 return 1;
2429 case BIT_FIELD_REF:
2430 return 3;
2432 case CONSTRUCTOR:
2433 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2435 default:
2436 return gimple_num_ops (stmt) - 1;
2440 /* Initialize VNO from STMT. */
2442 static void
2443 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2445 unsigned i;
2447 vno->opcode = gimple_assign_rhs_code (stmt);
2448 vno->type = gimple_expr_type (stmt);
2449 switch (vno->opcode)
2451 case REALPART_EXPR:
2452 case IMAGPART_EXPR:
2453 case VIEW_CONVERT_EXPR:
2454 vno->length = 1;
2455 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2456 break;
2458 case BIT_FIELD_REF:
2459 vno->length = 3;
2460 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2461 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2462 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2463 break;
2465 case CONSTRUCTOR:
2466 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2467 for (i = 0; i < vno->length; ++i)
2468 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2469 break;
2471 default:
2472 gcc_checking_assert (!gimple_assign_single_p (stmt));
2473 vno->length = gimple_num_ops (stmt) - 1;
2474 for (i = 0; i < vno->length; ++i)
2475 vno->op[i] = gimple_op (stmt, i + 1);
2479 /* Compute the hashcode for VNO and look for it in the hash table;
2480 return the resulting value number if it exists in the hash table.
2481 Return NULL_TREE if it does not exist in the hash table or if the
2482 result field of the operation is NULL. VNRESULT will contain the
2483 vn_nary_op_t from the hashtable if it exists. */
2485 static tree
2486 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2488 vn_nary_op_s **slot;
2490 if (vnresult)
2491 *vnresult = NULL;
2493 vno->hashcode = vn_nary_op_compute_hash (vno);
2494 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2495 NO_INSERT);
2496 if (!slot && current_info == optimistic_info)
2497 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2498 NO_INSERT);
2499 if (!slot)
2500 return NULL_TREE;
2501 if (vnresult)
2502 *vnresult = *slot;
2503 return (*slot)->result;
2506	/* Lookup an n-ary operation by its pieces and return the resulting value
2507 number if it exists in the hash table. Return NULL_TREE if it does
2508 not exist in the hash table or if the result field of the operation
2509 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2510 if it exists. */
2512 tree
2513 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2514 tree type, tree *ops, vn_nary_op_t *vnresult)
2516 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2517 sizeof_vn_nary_op (length));
2518 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2519 return vn_nary_op_lookup_1 (vno1, vnresult);
2522 /* Lookup OP in the current hash table, and return the resulting value
2523 number if it exists in the hash table. Return NULL_TREE if it does
2524 not exist in the hash table or if the result field of the operation
2525 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2526 if it exists. */
2528 tree
2529 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2531 vn_nary_op_t vno1
2532 = XALLOCAVAR (struct vn_nary_op_s,
2533 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2534 init_vn_nary_op_from_op (vno1, op);
2535 return vn_nary_op_lookup_1 (vno1, vnresult);
2538 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2539 value number if it exists in the hash table. Return NULL_TREE if
2540 it does not exist in the hash table. VNRESULT will contain the
2541 vn_nary_op_t from the hashtable if it exists. */
2543 tree
2544 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2546 vn_nary_op_t vno1
2547 = XALLOCAVAR (struct vn_nary_op_s,
2548 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2549 init_vn_nary_op_from_stmt (vno1, stmt);
2550 return vn_nary_op_lookup_1 (vno1, vnresult);
2553 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2555 static vn_nary_op_t
2556 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2558 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2561 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2562 obstack. */
2564 static vn_nary_op_t
2565 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2567 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2568 &current_info->nary_obstack);
2570 vno1->value_id = value_id;
2571 vno1->length = length;
2572 vno1->result = result;
2574 return vno1;
2577 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2578 VNO->HASHCODE first. */
2580 static vn_nary_op_t
2581 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2582 bool compute_hash)
2584 vn_nary_op_s **slot;
2586 if (compute_hash)
2587 vno->hashcode = vn_nary_op_compute_hash (vno);
2589 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2590 gcc_assert (!*slot);
2592 *slot = vno;
2593 return vno;
2596	/* Insert an n-ary operation into the current hash table using its
2597 pieces. Return the vn_nary_op_t structure we created and put in
2598 the hashtable. */
2600 vn_nary_op_t
2601 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2602 tree type, tree *ops,
2603 tree result, unsigned int value_id)
2605 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2606 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2607 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2610 /* Insert OP into the current hash table with a value number of
2611 RESULT. Return the vn_nary_op_t structure we created and put in
2612 the hashtable. */
2614 vn_nary_op_t
2615 vn_nary_op_insert (tree op, tree result)
2617 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2618 vn_nary_op_t vno1;
2620 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2621 init_vn_nary_op_from_op (vno1, op);
2622 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2625 /* Insert the rhs of STMT into the current hash table with a value number of
2626 RESULT. */
2628 vn_nary_op_t
2629 vn_nary_op_insert_stmt (gimple stmt, tree result)
2631 vn_nary_op_t vno1
2632 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2633 result, VN_INFO (result)->value_id);
2634 init_vn_nary_op_from_stmt (vno1, stmt);
2635 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2638 /* Compute a hashcode for PHI operation VP1 and return it. */
2640 static inline hashval_t
2641 vn_phi_compute_hash (vn_phi_t vp1)
2643 inchash::hash hstate (vp1->block->index);
2644 int i;
2645 tree phi1op;
2646 tree type;
2648 /* If all PHI arguments are constants we need to distinguish
2649 the PHI node via its type. */
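/* E.g. (illustrative): x_1 = PHI <1, 2> of type int and
   y_2 = PHI <1, 2> of type long have identical argument lists;
   mixing the type into the hash (and checking it in vn_phi_eq)
   is what keeps them distinct.  */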
2650 type = vp1->type;
2651 hstate.merge_hash (vn_hash_type (type));
2653 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2655 if (phi1op == VN_TOP)
2656 continue;
2657 inchash::add_expr (phi1op, hstate);
2660 return hstate.end ();
2663 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2665 static int
2666 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2668 if (vp1->hashcode != vp2->hashcode)
2669 return false;
2671 if (vp1->block == vp2->block)
2673 int i;
2674 tree phi1op;
2676 /* If the PHI nodes do not have compatible types
2677 they are not the same. */
2678 if (!types_compatible_p (vp1->type, vp2->type))
2679 return false;
2681	      /* Any phi in the same block will have its arguments in the
2682 same edge order, because of how we store phi nodes. */
2683 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2685 tree phi2op = vp2->phiargs[i];
2686 if (phi1op == VN_TOP || phi2op == VN_TOP)
2687 continue;
2688 if (!expressions_equal_p (phi1op, phi2op))
2689 return false;
2691 return true;
2693 return false;
2696 static vec<tree> shared_lookup_phiargs;
2698 /* Lookup PHI in the current hash table, and return the resulting
2699 value number if it exists in the hash table. Return NULL_TREE if
2700 it does not exist in the hash table. */
2702 static tree
2703 vn_phi_lookup (gimple phi)
2705 vn_phi_s **slot;
2706 struct vn_phi_s vp1;
2707 unsigned i;
2709 shared_lookup_phiargs.truncate (0);
2711	  /* Canonicalize the SSA_NAMEs to their value numbers.  */
2712 for (i = 0; i < gimple_phi_num_args (phi); i++)
2714 tree def = PHI_ARG_DEF (phi, i);
2715 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2716 shared_lookup_phiargs.safe_push (def);
2718 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2719 vp1.phiargs = shared_lookup_phiargs;
2720 vp1.block = gimple_bb (phi);
2721 vp1.hashcode = vn_phi_compute_hash (&vp1);
2722 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2723 NO_INSERT);
2724 if (!slot && current_info == optimistic_info)
2725 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2726 NO_INSERT);
2727 if (!slot)
2728 return NULL_TREE;
2729 return (*slot)->result;
2732 /* Insert PHI into the current hash table with a value number of
2733 RESULT. */
2735 static vn_phi_t
2736 vn_phi_insert (gimple phi, tree result)
2738 vn_phi_s **slot;
2739 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2740 unsigned i;
2741 vec<tree> args = vNULL;
2743	  /* Canonicalize the SSA_NAMEs to their value numbers.  */
2744 for (i = 0; i < gimple_phi_num_args (phi); i++)
2746 tree def = PHI_ARG_DEF (phi, i);
2747 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2748 args.safe_push (def);
2750 vp1->value_id = VN_INFO (result)->value_id;
2751 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2752 vp1->phiargs = args;
2753 vp1->block = gimple_bb (phi);
2754 vp1->result = result;
2755 vp1->hashcode = vn_phi_compute_hash (vp1);
2757 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2759 /* Because we iterate over phi operations more than once, it's
2760	     possible the slot might already exist here, hence no assert.  */
2761 *slot = vp1;
2762 return vp1;
2766 /* Print set of components in strongly connected component SCC to OUT. */
2768 static void
2769 print_scc (FILE *out, vec<tree> scc)
2771 tree var;
2772 unsigned int i;
2774 fprintf (out, "SCC consists of:");
2775 FOR_EACH_VEC_ELT (scc, i, var)
2777 fprintf (out, " ");
2778 print_generic_expr (out, var, 0);
2780 fprintf (out, "\n");
2783 /* Set the value number of FROM to TO, return true if it has changed
2784 as a result. */
2786 static inline bool
2787 set_ssa_val_to (tree from, tree to)
2789 tree currval = SSA_VAL (from);
2790 HOST_WIDE_INT toff, coff;
2792 /* The only thing we allow as value numbers are ssa_names
2793 and invariants. So assert that here. We don't allow VN_TOP
2794 as visiting a stmt should produce a value-number other than
2795 that.
2796 ??? Still VN_TOP can happen for unreachable code, so force
2797 it to varying in that case. Not all code is prepared to
2798 get VN_TOP on valueization. */
2799 if (to == VN_TOP)
2801 if (dump_file && (dump_flags & TDF_DETAILS))
2802 fprintf (dump_file, "Forcing value number to varying on "
2803 "receiving VN_TOP\n");
2804 to = from;
2807 gcc_assert (to != NULL_TREE
2808 && (TREE_CODE (to) == SSA_NAME
2809 || is_gimple_min_invariant (to)));
2811 if (from != to)
2813 if (currval == from)
2815 if (dump_file && (dump_flags & TDF_DETAILS))
2817 fprintf (dump_file, "Not changing value number of ");
2818 print_generic_expr (dump_file, from, 0);
2819 fprintf (dump_file, " from VARYING to ");
2820 print_generic_expr (dump_file, to, 0);
2821 fprintf (dump_file, "\n");
2823 return false;
2825 else if (TREE_CODE (to) == SSA_NAME
2826 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2827 to = from;
2830 if (dump_file && (dump_flags & TDF_DETAILS))
2832 fprintf (dump_file, "Setting value number of ");
2833 print_generic_expr (dump_file, from, 0);
2834 fprintf (dump_file, " to ");
2835 print_generic_expr (dump_file, to, 0);
2838 if (currval != to
2839 && !operand_equal_p (currval, to, 0)
2840 /* ??? For addresses involving volatile objects or types operand_equal_p
2841 does not reliably detect ADDR_EXPRs as equal. We know we are only
2842 getting invariant gimple addresses here, so can use
2843 get_addr_base_and_unit_offset to do this comparison. */
2844 && !(TREE_CODE (currval) == ADDR_EXPR
2845 && TREE_CODE (to) == ADDR_EXPR
2846 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2847 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2848 && coff == toff))
2850 VN_INFO (from)->valnum = to;
2851 if (dump_file && (dump_flags & TDF_DETAILS))
2852 fprintf (dump_file, " (changed)\n");
2853 return true;
2855 if (dump_file && (dump_flags & TDF_DETAILS))
2856 fprintf (dump_file, "\n");
2857 return false;
2860 /* Mark as processed all the definitions in the defining stmt of USE, or
2861 the USE itself. */
2863 static void
2864 mark_use_processed (tree use)
2866 ssa_op_iter iter;
2867 def_operand_p defp;
2868 gimple stmt = SSA_NAME_DEF_STMT (use);
2870 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2872 VN_INFO (use)->use_processed = true;
2873 return;
2876 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2878 tree def = DEF_FROM_PTR (defp);
2880 VN_INFO (def)->use_processed = true;
2884 /* Set all definitions in STMT to value number to themselves.
2885 Return true if a value number changed. */
2887 static bool
2888 defs_to_varying (gimple stmt)
2890 bool changed = false;
2891 ssa_op_iter iter;
2892 def_operand_p defp;
2894 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2896 tree def = DEF_FROM_PTR (defp);
2897 changed |= set_ssa_val_to (def, def);
2899 return changed;
2902 static bool expr_has_constants (tree expr);
2904 /* Visit a copy between LHS and RHS, return true if the value number
2905 changed. */
2907 static bool
2908 visit_copy (tree lhs, tree rhs)
2910	  /* The copy may have a more interesting constant-filled expression
2911 (we don't, since we know our RHS is just an SSA name). */
2912 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2913 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2915 /* And finally valueize. */
2916 rhs = SSA_VAL (rhs);
2918 return set_ssa_val_to (lhs, rhs);
2921 /* Visit a nary operator RHS, value number it, and return true if the
2922 value number of LHS has changed as a result. */
2924 static bool
2925 visit_nary_op (tree lhs, gimple stmt)
2927 bool changed = false;
2928 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2930 if (result)
2931 changed = set_ssa_val_to (lhs, result);
2932 else
2934 changed = set_ssa_val_to (lhs, lhs);
2935 vn_nary_op_insert_stmt (stmt, lhs);
2938 return changed;
2941 /* Visit a call STMT storing into LHS. Return true if the value number
2942 of the LHS has changed as a result. */
2944 static bool
2945 visit_reference_op_call (tree lhs, gcall *stmt)
2947 bool changed = false;
2948 struct vn_reference_s vr1;
2949 vn_reference_t vnresult = NULL;
2950 tree vdef = gimple_vdef (stmt);
2952 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2953 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2954 lhs = NULL_TREE;
2956 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2957 if (vnresult)
2959 if (vnresult->result_vdef && vdef)
2960 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2962 if (!vnresult->result && lhs)
2963 vnresult->result = lhs;
2965 if (vnresult->result && lhs)
2967 changed |= set_ssa_val_to (lhs, vnresult->result);
2969 if (VN_INFO (vnresult->result)->has_constants)
2970 VN_INFO (lhs)->has_constants = true;
2973 else
2975 vn_reference_t vr2;
2976 vn_reference_s **slot;
2977 if (vdef)
2978 changed |= set_ssa_val_to (vdef, vdef);
2979 if (lhs)
2980 changed |= set_ssa_val_to (lhs, lhs);
2981 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2982 vr2->vuse = vr1.vuse;
2983 /* As we are not walking the virtual operand chain we know the
2984 shared_lookup_references are still original so we can re-use
2985 them here. */
2986 vr2->operands = vr1.operands.copy ();
2987 vr2->type = vr1.type;
2988 vr2->set = vr1.set;
2989 vr2->hashcode = vr1.hashcode;
2990 vr2->result = lhs;
2991 vr2->result_vdef = vdef;
2992 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
2993 INSERT);
2994 gcc_assert (!*slot);
2995 *slot = vr2;
2998 return changed;
3001 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3002 and return true if the value number of the LHS has changed as a result. */
3004 static bool
3005 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3007 bool changed = false;
3008 tree last_vuse;
3009 tree result;
3011 last_vuse = gimple_vuse (stmt);
3012 last_vuse_ptr = &last_vuse;
3013 result = vn_reference_lookup (op, gimple_vuse (stmt),
3014 default_vn_walk_kind, NULL);
3015 last_vuse_ptr = NULL;
3017 /* We handle type-punning through unions by value-numbering based
3018 on offset and size of the access. Be prepared to handle a
3019 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
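/* A hedged example:  union { int i; float f; } u;  after  u.i = 17;
   a load of u.f is looked up by offset and size, finds the stored
   int value, and is expressed as VIEW_CONVERT_EXPR <float> (17)
   below.  */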
3020 if (result
3021 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3023 /* We will be setting the value number of lhs to the value number
3024 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3025 So first simplify and lookup this expression to see if it
3026 is already available. */
3027 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3028 if ((CONVERT_EXPR_P (val)
3029 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3030 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3032 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3033 if ((CONVERT_EXPR_P (tem)
3034 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3035 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3036 TREE_TYPE (val), tem)))
3037 val = tem;
3039 result = val;
3040 if (!is_gimple_min_invariant (val)
3041 && TREE_CODE (val) != SSA_NAME)
3042 result = vn_nary_op_lookup (val, NULL);
3043 /* If the expression is not yet available, value-number lhs to
3044 a new SSA_NAME we create. */
3045 if (!result)
3047 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3048 "vntemp");
3049 /* Initialize value-number information properly. */
3050 VN_INFO_GET (result)->valnum = result;
3051 VN_INFO (result)->value_id = get_next_value_id ();
3052 VN_INFO (result)->expr = val;
3053 VN_INFO (result)->has_constants = expr_has_constants (val);
3054 VN_INFO (result)->needs_insertion = true;
3055 /* As all "inserted" statements are singleton SCCs, insert
3056 to the valid table. This is strictly needed to
3057 avoid re-generating new value SSA_NAMEs for the same
3058 expression during SCC iteration over and over (the
3059 optimistic table gets cleared after each iteration).
3060 We do not need to insert into the optimistic table, as
3061 lookups there will fall back to the valid table. */
3062 if (current_info == optimistic_info)
3064 current_info = valid_info;
3065 vn_nary_op_insert (val, result);
3066 current_info = optimistic_info;
3068 else
3069 vn_nary_op_insert (val, result);
3070 if (dump_file && (dump_flags & TDF_DETAILS))
3072 fprintf (dump_file, "Inserting name ");
3073 print_generic_expr (dump_file, result, 0);
3074 fprintf (dump_file, " for expression ");
3075 print_generic_expr (dump_file, val, 0);
3076 fprintf (dump_file, "\n");
3081 if (result)
3083 changed = set_ssa_val_to (lhs, result);
3084 if (TREE_CODE (result) == SSA_NAME
3085 && VN_INFO (result)->has_constants)
3087 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3088 VN_INFO (lhs)->has_constants = true;
3091 else
3093 changed = set_ssa_val_to (lhs, lhs);
3094 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3097 return changed;
3101 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3102 and return true if the value number of the LHS has changed as a result. */
3104 static bool
3105 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3107 bool changed = false;
3108 vn_reference_t vnresult = NULL;
3109 tree result, assign;
3110 bool resultsame = false;
3111 tree vuse = gimple_vuse (stmt);
3112 tree vdef = gimple_vdef (stmt);
3114	  /* First we want to look up using the *vuses* from the store and see
3115	     whether the last store to this location with the same address
3116	     had the same value.
3118 The vuses represent the memory state before the store. If the
3119	     memory state, address, and value of the store are the same as those
3120	     of the last store to this location, then this store will produce the
3121 same memory state as that store.
3123 In this case the vdef versions for this store are value numbered to those
3124 vuse versions, since they represent the same memory state after
3125 this store.
3127 Otherwise, the vdefs for the store are used when inserting into
3128 the table, since the store generates a new memory state. */
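/* As a concrete sketch (not from a testcase):

     # .MEM_2 = VDEF <.MEM_1>
     a.x = t_3;
     ...
     # .MEM_4 = VDEF <.MEM_2>
     a.x = t_3;

   The second store looks up a.x with vuse .MEM_2, finds t_3, and
   therefore value numbers .MEM_4 to .MEM_2: it creates no new
   memory state.  */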
3130 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3132 if (result)
3134 if (TREE_CODE (result) == SSA_NAME)
3135 result = SSA_VAL (result);
3136 if (TREE_CODE (op) == SSA_NAME)
3137 op = SSA_VAL (op);
3138 resultsame = expressions_equal_p (result, op);
3141 if ((!result || !resultsame)
3142 /* Only perform the following when being called from PRE
3143 which embeds tail merging. */
3144 && default_vn_walk_kind == VN_WALK)
3146 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3147 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3148 if (vnresult)
3150 VN_INFO (vdef)->use_processed = true;
3151 return set_ssa_val_to (vdef, vnresult->result_vdef);
3155 if (!result || !resultsame)
3157 if (dump_file && (dump_flags & TDF_DETAILS))
3159 fprintf (dump_file, "No store match\n");
3160 fprintf (dump_file, "Value numbering store ");
3161 print_generic_expr (dump_file, lhs, 0);
3162 fprintf (dump_file, " to ");
3163 print_generic_expr (dump_file, op, 0);
3164 fprintf (dump_file, "\n");
3166 /* Have to set value numbers before insert, since insert is
3167 going to valueize the references in-place. */
3168 if (vdef)
3170 changed |= set_ssa_val_to (vdef, vdef);
3173 /* Do not insert structure copies into the tables. */
3174 if (is_gimple_min_invariant (op)
3175 || is_gimple_reg (op))
3176 vn_reference_insert (lhs, op, vdef, NULL);
3178 /* Only perform the following when being called from PRE
3179 which embeds tail merging. */
3180 if (default_vn_walk_kind == VN_WALK)
3182 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3183 vn_reference_insert (assign, lhs, vuse, vdef);
3186 else
3188 /* We had a match, so value number the vdef to have the value
3189 number of the vuse it came from. */
3191 if (dump_file && (dump_flags & TDF_DETAILS))
3192 fprintf (dump_file, "Store matched earlier value,"
3193 "value numbering store vdefs to matching vuses.\n");
3195 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3198 return changed;
3201 /* Visit and value number PHI, return true if the value number
3202 changed. */
3204 static bool
3205 visit_phi (gimple phi)
3207 bool changed = false;
3208 tree result;
3209 tree sameval = VN_TOP;
3210 bool allsame = true;
3212 /* TODO: We could check for this in init_sccvn, and replace this
3213 with a gcc_assert. */
3214 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3215 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3217 /* See if all non-TOP arguments have the same value. TOP is
3218 equivalent to everything, so we can ignore it. */
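/* For instance (illustrative): in
     x_2 = PHI <a_1(3), b_4(4), a_1(5)>
   where b_4 still values to VN_TOP because it was not yet visited,
   the remaining arguments agree on a_1, so x_2 is value numbered
   to a_1.  */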
3219 edge_iterator ei;
3220 edge e;
3221 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3222 if (e->flags & EDGE_EXECUTABLE)
3224 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3226 if (TREE_CODE (def) == SSA_NAME)
3227 def = SSA_VAL (def);
3228 if (def == VN_TOP)
3229 continue;
3230 if (sameval == VN_TOP)
3232 sameval = def;
3234 else
3236 if (!expressions_equal_p (def, sameval))
3238 allsame = false;
3239 break;
3244 /* If all value numbered to the same value, the phi node has that
3245 value. */
3246 if (allsame)
3247 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3249 /* Otherwise, see if it is equivalent to a phi node in this block. */
3250 result = vn_phi_lookup (phi);
3251 if (result)
3252 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3253 else
3255 vn_phi_insert (phi, PHI_RESULT (phi));
3256 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3257 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3258 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3261 return changed;
3264 /* Return true if EXPR contains constants. */
3266 static bool
3267 expr_has_constants (tree expr)
3269 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3271 case tcc_unary:
3272 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3274 case tcc_binary:
3275 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3276 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3277 /* Constants inside reference ops are rarely interesting, but
3278 it can take a lot of looking to find them. */
3279 case tcc_reference:
3280 case tcc_declaration:
3281 return false;
3282 default:
3283 return is_gimple_min_invariant (expr);
3285 return false;
3288 /* Return true if STMT contains constants. */
3290 static bool
3291 stmt_has_constants (gimple stmt)
3293 tree tem;
3295 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3296 return false;
3298 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3300 case GIMPLE_TERNARY_RHS:
3301 tem = gimple_assign_rhs3 (stmt);
3302 if (TREE_CODE (tem) == SSA_NAME)
3303 tem = SSA_VAL (tem);
3304 if (is_gimple_min_invariant (tem))
3305 return true;
3306 /* Fallthru. */
3308 case GIMPLE_BINARY_RHS:
3309 tem = gimple_assign_rhs2 (stmt);
3310 if (TREE_CODE (tem) == SSA_NAME)
3311 tem = SSA_VAL (tem);
3312 if (is_gimple_min_invariant (tem))
3313 return true;
3314 /* Fallthru. */
3316 case GIMPLE_SINGLE_RHS:
3317 /* Constants inside reference ops are rarely interesting, but
3318 it can take a lot of looking to find them. */
3319 case GIMPLE_UNARY_RHS:
3320 tem = gimple_assign_rhs1 (stmt);
3321 if (TREE_CODE (tem) == SSA_NAME)
3322 tem = SSA_VAL (tem);
3323 return is_gimple_min_invariant (tem);
3325 default:
3326 gcc_unreachable ();
3328 return false;
3331 /* Simplify the binary expression RHS, and return the result if
3332 simplified. */
3334 static tree
3335 simplify_binary_expression (gimple stmt)
3337 tree result = NULL_TREE;
3338 tree op0 = gimple_assign_rhs1 (stmt);
3339 tree op1 = gimple_assign_rhs2 (stmt);
3340 enum tree_code code = gimple_assign_rhs_code (stmt);
3342 /* This will not catch every single case we could combine, but will
3343 catch those with constants. The goal here is to simultaneously
3344 combine constants between expressions, but avoid infinite
3345 expansion of expressions during simplification. */
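/* A hedged example of the intended combining: given
     t_2 = a_1 + 1;
     x_3 = t_2 + 2;
   t_2 has a constant-filled expression, so op0 expands to a_1 + 1
   and fold collapses the RHS of x_3 to a_1 + 3.  */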
3346 op0 = vn_valueize (op0);
3347 if (TREE_CODE (op0) == SSA_NAME
3348 && (VN_INFO (op0)->has_constants
3349 || TREE_CODE_CLASS (code) == tcc_comparison
3350 || code == COMPLEX_EXPR))
3351 op0 = vn_get_expr_for (op0);
3353 op1 = vn_valueize (op1);
3354 if (TREE_CODE (op1) == SSA_NAME
3355 && (VN_INFO (op1)->has_constants
3356 || code == COMPLEX_EXPR))
3357 op1 = vn_get_expr_for (op1);
3359 /* Pointer plus constant can be represented as invariant address.
3360	     Do so to allow further propagation, see also tree forwprop.  */
3361 if (code == POINTER_PLUS_EXPR
3362 && tree_fits_uhwi_p (op1)
3363 && TREE_CODE (op0) == ADDR_EXPR
3364 && is_gimple_min_invariant (op0))
3365 return build_invariant_address (TREE_TYPE (op0),
3366 TREE_OPERAND (op0, 0),
3367 tree_to_uhwi (op1));
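/* E.g. (illustrative):  &a p+ 4  is returned as the invariant
   address  &MEM[(void *)&a + 4B],  which later lookups can match
   without chasing the POINTER_PLUS_EXPR.  */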
3369 /* Avoid folding if nothing changed. */
3370 if (op0 == gimple_assign_rhs1 (stmt)
3371 && op1 == gimple_assign_rhs2 (stmt))
3372 return NULL_TREE;
3374 fold_defer_overflow_warnings ();
3376 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3377 if (result)
3378 STRIP_USELESS_TYPE_CONVERSION (result);
3380 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3381 stmt, 0);
3383 /* Make sure result is not a complex expression consisting
3384	     of operators of operators (i.e. (a + b) + (a + c)).
3385 Otherwise, we will end up with unbounded expressions if
3386 fold does anything at all. */
3387 if (result && valid_gimple_rhs_p (result))
3388 return result;
3390 return NULL_TREE;
3393 /* Simplify the unary expression RHS, and return the result if
3394 simplified. */
3396 static tree
3397 simplify_unary_expression (gassign *stmt)
3399 tree result = NULL_TREE;
3400 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3401 enum tree_code code = gimple_assign_rhs_code (stmt);
3403 /* We handle some tcc_reference codes here that are all
3404 GIMPLE_ASSIGN_SINGLE codes. */
3405 if (code == REALPART_EXPR
3406 || code == IMAGPART_EXPR
3407 || code == VIEW_CONVERT_EXPR
3408 || code == BIT_FIELD_REF)
3409 op0 = TREE_OPERAND (op0, 0);
3411 orig_op0 = op0;
3412 op0 = vn_valueize (op0);
3413 if (TREE_CODE (op0) == SSA_NAME)
3415 if (VN_INFO (op0)->has_constants)
3416 op0 = vn_get_expr_for (op0);
3417 else if (CONVERT_EXPR_CODE_P (code)
3418 || code == REALPART_EXPR
3419 || code == IMAGPART_EXPR
3420 || code == VIEW_CONVERT_EXPR
3421 || code == BIT_FIELD_REF)
3423 /* We want to do tree-combining on conversion-like expressions.
3424 Make sure we feed only SSA_NAMEs or constants to fold though. */
3425 tree tem = vn_get_expr_for (op0);
3426 if (UNARY_CLASS_P (tem)
3427 || BINARY_CLASS_P (tem)
3428 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3429 || TREE_CODE (tem) == SSA_NAME
3430 || TREE_CODE (tem) == CONSTRUCTOR
3431 || is_gimple_min_invariant (tem))
3432 op0 = tem;
3436 /* Avoid folding if nothing changed, but remember the expression. */
3437 if (op0 == orig_op0)
3438 return NULL_TREE;
3440 if (code == BIT_FIELD_REF)
3442 tree rhs = gimple_assign_rhs1 (stmt);
3443 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3444 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3446 else
3447 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3448 if (result)
3450 STRIP_USELESS_TYPE_CONVERSION (result);
3451 if (valid_gimple_rhs_p (result))
3452 return result;
3455 return NULL_TREE;
3458 /* Try to simplify RHS using equivalences and constant folding. */
3460 static tree
3461 try_to_simplify (gassign *stmt)
3463 enum tree_code code = gimple_assign_rhs_code (stmt);
3464 tree tem;
3466	  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3467	     in this case; there is no point in doing extra work.  */
3468 if (code == SSA_NAME)
3469 return NULL_TREE;
3471 /* First try constant folding based on our current lattice. */
3472 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3473 if (tem
3474 && (TREE_CODE (tem) == SSA_NAME
3475 || is_gimple_min_invariant (tem)))
3476 return tem;
3478 /* If that didn't work try combining multiple statements. */
3479 switch (TREE_CODE_CLASS (code))
3481 case tcc_reference:
3482 /* Fallthrough for some unary codes that can operate on registers. */
3483 if (!(code == REALPART_EXPR
3484 || code == IMAGPART_EXPR
3485 || code == VIEW_CONVERT_EXPR
3486 || code == BIT_FIELD_REF))
3487 break;
3488 /* We could do a little more with unary ops, if they expand
3489 into binary ops, but it's debatable whether it is worth it. */
3490 case tcc_unary:
3491 return simplify_unary_expression (stmt);
3493 case tcc_comparison:
3494 case tcc_binary:
3495 return simplify_binary_expression (stmt);
3497 default:
3498 break;
3501 return NULL_TREE;
3504 /* Visit and value number USE, return true if the value number
3505 changed. */
3507 static bool
3508 visit_use (tree use)
3510 bool changed = false;
3511 gimple stmt = SSA_NAME_DEF_STMT (use);
3513 mark_use_processed (use);
3515 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3516 if (dump_file && (dump_flags & TDF_DETAILS)
3517 && !SSA_NAME_IS_DEFAULT_DEF (use))
3519 fprintf (dump_file, "Value numbering ");
3520 print_generic_expr (dump_file, use, 0);
3521 fprintf (dump_file, " stmt = ");
3522 print_gimple_stmt (dump_file, stmt, 0, 0);
3525 /* Handle uninitialized uses. */
3526 if (SSA_NAME_IS_DEFAULT_DEF (use))
3527 changed = set_ssa_val_to (use, use);
3528 else
3530 if (gimple_code (stmt) == GIMPLE_PHI)
3531 changed = visit_phi (stmt);
3532 else if (gimple_has_volatile_ops (stmt))
3533 changed = defs_to_varying (stmt);
3534 else if (is_gimple_assign (stmt))
3536 enum tree_code code = gimple_assign_rhs_code (stmt);
3537 tree lhs = gimple_assign_lhs (stmt);
3538 tree rhs1 = gimple_assign_rhs1 (stmt);
3539 tree simplified;
3541 /* Shortcut for copies. Simplifying copies is pointless,
3542 since we copy the expression and value they represent. */
3543 if (code == SSA_NAME
3544 && TREE_CODE (lhs) == SSA_NAME)
3546 changed = visit_copy (lhs, rhs1);
3547 goto done;
3549 simplified = try_to_simplify (as_a <gassign *> (stmt));
3550 if (simplified)
3552 if (dump_file && (dump_flags & TDF_DETAILS))
3554 fprintf (dump_file, "RHS ");
3555 print_gimple_expr (dump_file, stmt, 0, 0);
3556 fprintf (dump_file, " simplified to ");
3557 print_generic_expr (dump_file, simplified, 0);
3558 if (TREE_CODE (lhs) == SSA_NAME)
3559 fprintf (dump_file, " has constants %d\n",
3560 expr_has_constants (simplified));
3561 else
3562 fprintf (dump_file, "\n");
3565 /* Setting value numbers to constants will occasionally
3566 screw up phi congruence because constants are not
3567 uniquely associated with a single ssa name that can be
3568 looked up. */
3569 if (simplified
3570 && is_gimple_min_invariant (simplified)
3571 && TREE_CODE (lhs) == SSA_NAME)
3573 VN_INFO (lhs)->expr = simplified;
3574 VN_INFO (lhs)->has_constants = true;
3575 changed = set_ssa_val_to (lhs, simplified);
3576 goto done;
3578 else if (simplified
3579 && TREE_CODE (simplified) == SSA_NAME
3580 && TREE_CODE (lhs) == SSA_NAME)
3582 changed = visit_copy (lhs, simplified);
3583 goto done;
3585 else if (simplified)
3587 if (TREE_CODE (lhs) == SSA_NAME)
3589 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3590 /* We have to unshare the expression or else
3591	       valueizing may change the IL stream.  */
3592 VN_INFO (lhs)->expr = unshare_expr (simplified);
3595 else if (stmt_has_constants (stmt)
3596 && TREE_CODE (lhs) == SSA_NAME)
3597 VN_INFO (lhs)->has_constants = true;
3598 else if (TREE_CODE (lhs) == SSA_NAME)
3600 /* We reset expr and constantness here because we may
3601 have been value numbering optimistically, and
3602 iterating. They may become non-constant in this case,
3603 even if they were optimistically constant. */
3605 VN_INFO (lhs)->has_constants = false;
3606 VN_INFO (lhs)->expr = NULL_TREE;
3609 if ((TREE_CODE (lhs) == SSA_NAME
3610 /* We can substitute SSA_NAMEs that are live over
3611 abnormal edges with their constant value. */
3612 && !(gimple_assign_copy_p (stmt)
3613 && is_gimple_min_invariant (rhs1))
3614 && !(simplified
3615 && is_gimple_min_invariant (simplified))
3616 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3617 /* Stores or copies from SSA_NAMEs that are live over
3618 abnormal edges are a problem. */
3619 || (code == SSA_NAME
3620 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3621 changed = defs_to_varying (stmt);
3622 else if (REFERENCE_CLASS_P (lhs)
3623 || DECL_P (lhs))
3624 changed = visit_reference_op_store (lhs, rhs1, stmt);
3625 else if (TREE_CODE (lhs) == SSA_NAME)
3627 if ((gimple_assign_copy_p (stmt)
3628 && is_gimple_min_invariant (rhs1))
3629 || (simplified
3630 && is_gimple_min_invariant (simplified)))
3632 VN_INFO (lhs)->has_constants = true;
3633 if (simplified)
3634 changed = set_ssa_val_to (lhs, simplified);
3635 else
3636 changed = set_ssa_val_to (lhs, rhs1);
3638 else
3640 /* First try to lookup the simplified expression. */
3641 if (simplified)
3643 enum gimple_rhs_class rhs_class;
3646 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3647 if ((rhs_class == GIMPLE_UNARY_RHS
3648 || rhs_class == GIMPLE_BINARY_RHS
3649 || rhs_class == GIMPLE_TERNARY_RHS)
3650 && valid_gimple_rhs_p (simplified))
3652 tree result = vn_nary_op_lookup (simplified, NULL);
3653 if (result)
3655 changed = set_ssa_val_to (lhs, result);
3656 goto done;
3661 /* Otherwise visit the original statement. */
3662 switch (vn_get_stmt_kind (stmt))
3664 case VN_NARY:
3665 changed = visit_nary_op (lhs, stmt);
3666 break;
3667 case VN_REFERENCE:
3668 changed = visit_reference_op_load (lhs, rhs1, stmt);
3669 break;
3670 default:
3671 changed = defs_to_varying (stmt);
3672 break;
3676 else
3677 changed = defs_to_varying (stmt);
3679 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3681 tree lhs = gimple_call_lhs (stmt);
3682 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3684 /* Try constant folding based on our current lattice. */
3685 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3686 vn_valueize);
3687 if (simplified)
3689 if (dump_file && (dump_flags & TDF_DETAILS))
3691 fprintf (dump_file, "call ");
3692 print_gimple_expr (dump_file, stmt, 0, 0);
3693 fprintf (dump_file, " simplified to ");
3694 print_generic_expr (dump_file, simplified, 0);
3695 if (TREE_CODE (lhs) == SSA_NAME)
3696 fprintf (dump_file, " has constants %d\n",
3697 expr_has_constants (simplified));
3698 else
3699 fprintf (dump_file, "\n");
3702 /* Setting value numbers to constants will occasionally
3703 screw up phi congruence because constants are not
3704 uniquely associated with a single ssa name that can be
3705 looked up. */
3706 if (simplified
3707 && is_gimple_min_invariant (simplified))
3709 VN_INFO (lhs)->expr = simplified;
3710 VN_INFO (lhs)->has_constants = true;
3711 changed = set_ssa_val_to (lhs, simplified);
3712 if (gimple_vdef (stmt))
3713 changed |= set_ssa_val_to (gimple_vdef (stmt),
3714 gimple_vuse (stmt));
3715 goto done;
3717 else if (simplified
3718 && TREE_CODE (simplified) == SSA_NAME)
3720 changed = visit_copy (lhs, simplified);
3721 if (gimple_vdef (stmt))
3722 changed |= set_ssa_val_to (gimple_vdef (stmt),
3723 gimple_vuse (stmt));
3724 goto done;
3726 else
3728 if (stmt_has_constants (stmt))
3729 VN_INFO (lhs)->has_constants = true;
3730 else
3732 /* We reset expr and constantness here because we may
3733 have been value numbering optimistically, and
3734 iterating. They may become non-constant in this case,
3735 even if they were optimistically constant. */
3736 VN_INFO (lhs)->has_constants = false;
3737 VN_INFO (lhs)->expr = NULL_TREE;
3740 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3742 changed = defs_to_varying (stmt);
3743 goto done;
3748 if (!gimple_call_internal_p (stmt)
3749 && (/* Calls to the same function with the same vuse
3750 and the same operands do not necessarily return the same
3751 value, unless they're pure or const. */
3752 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3753 /* If calls have a vdef, subsequent calls won't have
3754	          the same incoming vuse.  So, if two calls with a vdef have the
3755	          same vuse, we know they're not subsequent.
3756	          We can value number two such non-subsequent calls to the same
3757	          function with the same vuse and the same operands as equal,
3758	          because there is no code in the program that could
3759	          compare the two values...
3760 || (gimple_vdef (stmt)
3761 /* ... unless the call returns a pointer which does
3762 not alias with anything else. In which case the
3763 information that the values are distinct are encoded
3764 in the IL. */
3765 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3766 /* Only perform the following when being called from PRE
3767 which embeds tail merging. */
3768 && default_vn_walk_kind == VN_WALK)))
3769 changed = visit_reference_op_call (lhs, call_stmt);
3770 else
3771 changed = defs_to_varying (stmt);
3773 else
3774 changed = defs_to_varying (stmt);
3776 done:
3777 return changed;
3780	/* Compare two operands by reverse postorder index.  */
3782 static int
3783 compare_ops (const void *pa, const void *pb)
3785 const tree opa = *((const tree *)pa);
3786 const tree opb = *((const tree *)pb);
3787 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3788 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3789 basic_block bba;
3790 basic_block bbb;
3792 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3793 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3794 else if (gimple_nop_p (opstmta))
3795 return -1;
3796 else if (gimple_nop_p (opstmtb))
3797 return 1;
3799 bba = gimple_bb (opstmta);
3800 bbb = gimple_bb (opstmtb);
3802 if (!bba && !bbb)
3803 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3804 else if (!bba)
3805 return -1;
3806 else if (!bbb)
3807 return 1;
3809 if (bba == bbb)
3811 if (gimple_code (opstmta) == GIMPLE_PHI
3812 && gimple_code (opstmtb) == GIMPLE_PHI)
3813 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3814 else if (gimple_code (opstmta) == GIMPLE_PHI)
3815 return -1;
3816 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3817 return 1;
3818 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3819 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3820 else
3821 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3823 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3826 /* Sort an array containing members of a strongly connected component
3827 SCC so that the members are ordered by RPO number.
3828 This means that when the sort is complete, iterating through the
3829 array will give you the members in RPO order. */
3831 static void
3832 sort_scc (vec<tree> scc)
3834 scc.qsort (compare_ops);
3837	/* Insert the no longer used nary ONARY into the hash INFO.  */
3839 static void
3840 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3842 size_t size = sizeof_vn_nary_op (onary->length);
3843 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3844 &info->nary_obstack);
3845 memcpy (nary, onary, size);
3846 vn_nary_op_insert_into (nary, info->nary, false);
3849	/* Insert the no longer used phi OPHI into the hash INFO.  */
3851 static void
3852 copy_phi (vn_phi_t ophi, vn_tables_t info)
3854 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3855 vn_phi_s **slot;
3856 memcpy (phi, ophi, sizeof (*phi));
3857 ophi->phiargs.create (0);
3858 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3859 gcc_assert (!*slot);
3860 *slot = phi;
3863	/* Insert the no longer used reference OREF into the hash INFO.  */
3865 static void
3866 copy_reference (vn_reference_t oref, vn_tables_t info)
3868 vn_reference_t ref;
3869 vn_reference_s **slot;
3870 ref = (vn_reference_t) pool_alloc (info->references_pool);
3871 memcpy (ref, oref, sizeof (*ref));
3872 oref->operands.create (0);
3873 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3874 if (*slot)
3875 free_reference (*slot);
3876 *slot = ref;
3879 /* Process a strongly connected component in the SSA graph. */
3881 static void
3882 process_scc (vec<tree> scc)
3884 tree var;
3885 unsigned int i;
3886 unsigned int iterations = 0;
3887 bool changed = true;
3888 vn_nary_op_iterator_type hin;
3889 vn_phi_iterator_type hip;
3890 vn_reference_iterator_type hir;
3891 vn_nary_op_t nary;
3892 vn_phi_t phi;
3893 vn_reference_t ref;
3895 /* If the SCC has a single member, just visit it. */
3896 if (scc.length () == 1)
3898 tree use = scc[0];
3899 if (VN_INFO (use)->use_processed)
3900 return;
3901 /* We need to make sure it doesn't form a cycle itself, which can
3902 happen for self-referential PHI nodes. In that case we would
3903 end up inserting an expression with VN_TOP operands into the
3904 valid table which makes us derive bogus equivalences later.
3905 The cheapest way to check this is to assume it for all PHI nodes. */
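/* E.g. (illustrative):  i_1 = PHI <i_1(latch), 0(entry)>  is a
   single-member SCC that still references itself through the back
   edge, so it has to be iterated below like any larger SCC.  */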
3906 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3907 /* Fallthru to iteration. */ ;
3908 else
3910 visit_use (use);
3911 return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }
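  /* A sketch of how the optimistic iteration converges, with made-up
     SSA names: for the cycle

       i_1 = PHI <0 (2), i_2 (3)>
       i_2 = i_1 + 0

     iteration 1 optimistically values i_1 as 0 (VN_TOP PHI arguments
     are ignored) and therefore i_2 as 0 + 0 == 0; iteration 2 produces
     the same value numbers, nothing changes, and the loop exits with
     both names proven equal to 0.  */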
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);
  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }
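  /* The limit is user-adjustable; for example, compiling with
     --param sccvn-max-scc-size=100000 (the value here is only an
     illustration) raises it for pathological inputs, at the cost of
     compile time.  */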
  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;
	    }

	continue_walking:
	  VN_INFO (name)->low = MIN (VN_INFO (name)->low,
				     VN_INFO (use)->low);

	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

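/* A worked example with hypothetical SSA names: for

     a_1 = PHI <0 (2), b_2 (3)>
     b_2 = a_1 + 1

   DFS (b_2) assigns b_2 dfsnum 1, recurses into a_1 (dfsnum 2), and a_1's
   use of b_2 (still on the SCC stack) lowers a_1's low value to 1.  Back
   in b_2, low == dfsnum holds, so { a_1, b_2 } is popped as one SCC and
   handed to extract_and_process_scc_for_name.  */
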
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp
    = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp maps RPO positions to blocks: rpo_numbers_temp[i] = bb
     means that the i'th block in RPO order is bb.  We want the inverse
     mapping from each block to its RPO number, so rearrange the array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
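  /* Illustration with made-up block indices: if rpo_numbers_temp is
     { 2, 3, 5, 4 }, i.e. bb2 comes first in RPO, then bb3, bb5, bb4,
     the loop above produces rpo_numbers[2] = 0, rpo_numbers[3] = 1,
     rpo_numbers[5] = 2 and rpo_numbers[4] = 3.  */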
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

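/* For example (values invented for illustration): a result of a_5 copies
   a_5's value id, a result of the constant 42 gets the shared constant
   value id for 42, and anything else (including no result at all)
   receives a fresh, unique id.  */
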
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};

void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable, consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }
  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
	&& !DFS (op))
      {
	fail = true;
	return;
      }
  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}

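/* Illustrative example (names invented): if a_1 is value-numbered to the
   constant 5, the condition of "if (a_1 > 3)" folds to true, so
   find_taken_edge returns the true edge and the false edge loses its
   EDGE_EXECUTABLE flag, letting dominated blocks be proven unreachable
   by the reachability check above.  */
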
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how the alias oracle is consulted while walking during the VN
   process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	{
	  VN_INFO (def)->visited = true;
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last stmt's
     SSA uses and deciding whether outgoing edges are not executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }
  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

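/* Typical use by a client pass (a sketch only; see the FRE/PRE passes in
   tree-ssa-pre.c for the real callers):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;   // bailed out, nothing to exploit
     ... use SSA_VAL () and the value ids to eliminate redundancies ...
     free_scc_vn ();
*/
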
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
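
/* For example, an nary with opcode TRUNC_DIV_EXPR whose second operand
   is not a nonzero constant may trap (division by zero), as may a
   floating-point comparison when signaling NaNs are honored; callers use
   this predicate to avoid placing such expressions where they were not
   originally executed.  */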