/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

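/* For example (an illustrative sketch, not taken from a testcase),
   consider the SSA cycle of a simple counted loop:

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   The SCC {i_1, i_2} is iterated using the optimistic table: on the
   first pass the not-yet-visited back edge value is VN_TOP and is
   ignored, so i_1 optimistically gets value 0, i_2 = 0 + 0 simplifies
   to 0, and a second pass confirms nothing changed.  Both names end
   up value numbered to the constant 0, which a single non-iterating
   RPO pass could not conclude.  */
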
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}

/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

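/* Return true if reference operations V and C are equivalent.  */
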
inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

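/* Free the reference operation structure V.  */
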
inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

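/* For example (an illustrative sketch): with

     a_2 = b_1 + 1;
     c_3 = a_2;

   where c_3 has value number a_2, the representative returned for
   c_3 is the (valueized) tree b_1 + 1 rebuilt from the defining
   statement of a_2, and it is cached in vn->expr for later asks.  */
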
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

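/* Illustrative mapping (made-up GIMPLE, not from the sources):

     x_1 = y_2 + z_3;    -> VN_NARY       (binary rhs)
     x_1 = a.b;          -> VN_REFERENCE  (memory reference)
     x_1 = PHI <...>;    -> VN_PHI
     x_1 = &a[i_2];      -> VN_REFERENCE  (non-invariant ADDR_EXPR)
     x_1 = &a;           -> VN_CONSTANT   (invariant ADDR_EXPR)  */
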
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

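/* A typical use (an illustrative sketch): a pass records a constant
   value and can later distinguish constant value ids from value ids
   backed by SSA names:

     unsigned int id = get_or_alloc_constant_value_id (integer_zero_node);
     gcc_checking_assert (value_id_constant_p (id));  */
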
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

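/* Note the effect of the offset accumulation above: only the combined
   constant byte offset of a run of constant-offset operands is hashed,
   not the individual operands.  So, illustratively, a.b.c (with .b.c
   at combined byte offset 8) and MEM[&a, 8] hash identically and are
   then unified or told apart by vn_reference_eq.  */
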
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

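/* Note how the comparison mirrors the hashing: runs of constant-offset
   operands are compared by their accumulated offsets only, and when an
   ADDR_EXPR follows a MEM_REF, a temporary operand for the underlying
   decl is compared instead, so both spellings of the same decl access
   are treated as equal.  */
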
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	/* These are only interesting for their operands, their
	   existence, and their type.  They will never be the last
	   ref in the chain of references (i.e. they require an
	   operand), so we don't have to put anything
	   for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

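/* As an illustration (names made up), the reference a.f[i_1] for a
   global struct 'a' is flattened outermost-first to roughly:

     { ARRAY_REF      op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF  op0 = FIELD_DECL f }
     { MEM_REF        op0 = 0, off = 0 }
     { ADDR_EXPR      op0 = &a }

   where the trailing two entries come from the canonicalization of
   decls to MEM[&decl] described in the VAR_DECL case above.  */
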
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

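/* For example (an illustrative sketch): for the operands of
   MEM[&a.b.c, 4], assuming the constant byte offset of .b.c within
   'a' is 8, the ADDR_EXPR operand is rewritten to &a and the
   preceding MEM_REF offset becomes 12, yielding the canonical
   MEM[&a, 12].  */
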
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

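/* For example (an illustrative sketch): if p_1 is defined as

     p_1 = &a.b;

   the operands of MEM[p_1, 4] are rewritten to those of
   MEM[&a, 4 + constant byte offset of .b], and the function recurses
   in case the new address operand can be folded further.  */
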
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size = TYPE_PRECISION (ref->type);
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}

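/* For example (an illustrative sketch): given

     static const int tbl[4] = { 1, 2, 3, 5 };

   a lookup of tbl[2] whose operands all carry known constant offsets
   reaches the DECL's CONSTRUCTOR via ctor_for_folding and is folded
   to the constant 3.  */
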
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

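/* Note the hash fixup above relies on the vuse having been *added*
   into the hash (see the ??? comment in vn_reference_compute_hash):
   replacing the vuse is then the O(1) update

     hash' = hash - SSA_NAME_VERSION (old vuse)
	     + SSA_NAME_VERSION (new vuse)

   instead of a rehash of all operands.  */
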
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

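  /* E.g. (an illustrative sketch): after

       memset (&a, 0, sizeof (a));

     a later read of a.b.c that is fully covered by the memset'd
     region is resolved to the zero constant of its type by the code
     above.  */
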
1747 /* 2) Assignment from an empty CONSTRUCTOR. */
1748 else if (is_gimple_reg_type (vr->type)
1749 && gimple_assign_single_p (def_stmt)
1750 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1751 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1753 tree base2;
1754 HOST_WIDE_INT offset2, size2, maxsize2;
1755 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1756 &offset2, &size2, &maxsize2);
1757 if (maxsize2 != -1
1758 && operand_equal_p (base, base2, 0)
1759 && offset2 <= offset
1760 && offset2 + size2 >= offset + maxsize)
1762 tree val = build_zero_cst (vr->type);
1763 return vn_reference_lookup_or_insert_for_pieces
1764 (vuse, vr->set, vr->type, vr->operands, val);
1768 /* 3) Assignment from a constant. We can use folds native encode/interpret
1769 routines to extract the assigned bits. */
1770 else if (vn_walk_kind == VN_WALKREWRITE
1771 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1772 && ref->size == maxsize
1773 && maxsize % BITS_PER_UNIT == 0
1774 && offset % BITS_PER_UNIT == 0
1775 && is_gimple_reg_type (vr->type)
1776 && gimple_assign_single_p (def_stmt)
1777 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1779 tree base2;
1780 HOST_WIDE_INT offset2, size2, maxsize2;
1781 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1782 &offset2, &size2, &maxsize2);
1783 if (maxsize2 != -1
1784 && maxsize2 == size2
1785 && size2 % BITS_PER_UNIT == 0
1786 && offset2 % BITS_PER_UNIT == 0
1787 && operand_equal_p (base, base2, 0)
1788 && offset2 <= offset
1789 && offset2 + size2 >= offset + maxsize)
1791 /* We support up to 512-bit values (for V8DFmode). */
1792 unsigned char buffer[64];
1793 int len;
1795 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1796 buffer, sizeof (buffer));
1797 if (len > 0)
1799 tree val = native_interpret_expr (vr->type,
1800 buffer
1801 + ((offset - offset2)
1802 / BITS_PER_UNIT),
1803 ref->size / BITS_PER_UNIT);
1804 if (val)
1805 return vn_reference_lookup_or_insert_for_pieces
1806 (vuse, vr->set, vr->type, vr->operands, val);
1811 /* 4) Assignment from an SSA name which definition we may be able
1812 to access pieces from. */
1813 else if (ref->size == maxsize
1814 && is_gimple_reg_type (vr->type)
1815 && gimple_assign_single_p (def_stmt)
1816 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1818 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1819 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1820 if (is_gimple_assign (def_stmt2)
1821 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1822 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1823 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1825 tree base2;
1826 HOST_WIDE_INT offset2, size2, maxsize2, off;
1827 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1828 &offset2, &size2, &maxsize2);
1829 off = offset - offset2;
1830 if (maxsize2 != -1
1831 && maxsize2 == size2
1832 && operand_equal_p (base, base2, 0)
1833 && offset2 <= offset
1834 && offset2 + size2 >= offset + maxsize)
1836 tree val = NULL_TREE;
1837 HOST_WIDE_INT elsz
1838 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1839 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1841 if (off == 0)
1842 val = gimple_assign_rhs1 (def_stmt2);
1843 else if (off == elsz)
1844 val = gimple_assign_rhs2 (def_stmt2);
1846 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1847 && off % elsz == 0)
1849 tree ctor = gimple_assign_rhs1 (def_stmt2);
1850 unsigned i = off / elsz;
1851 if (i < CONSTRUCTOR_NELTS (ctor))
1853 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1854 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1856 if (TREE_CODE (TREE_TYPE (elt->value))
1857 != VECTOR_TYPE)
1858 val = elt->value;
1862 if (val)
1863 return vn_reference_lookup_or_insert_for_pieces
1864 (vuse, vr->set, vr->type, vr->operands, val);
1869 /* 5) For aggregate copies translate the reference through them if
1870 the copy kills ref. */
1871 else if (vn_walk_kind == VN_WALKREWRITE
1872 && gimple_assign_single_p (def_stmt)
1873 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1874 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1875 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1877 tree base2;
1878 HOST_WIDE_INT offset2, size2, maxsize2;
1879 int i, j;
1880 auto_vec<vn_reference_op_s> rhs;
1881 vn_reference_op_t vro;
1882 ao_ref r;
1884 if (!lhs_ref_ok)
1885 return (void *)-1;
1887 /* See if the assignment kills REF. */
1888 base2 = ao_ref_base (&lhs_ref);
1889 offset2 = lhs_ref.offset;
1890 size2 = lhs_ref.size;
1891 maxsize2 = lhs_ref.max_size;
1892 if (maxsize2 == -1
1893 || (base != base2 && !operand_equal_p (base, base2, 0))
1894 || offset2 > offset
1895 || offset2 + size2 < offset + maxsize)
1896 return (void *)-1;
1898 /* Find the common base of ref and the lhs. lhs_ops already
1899 contains valueized operands for the lhs. */
1900 i = vr->operands.length () - 1;
1901 j = lhs_ops.length () - 1;
1902 while (j >= 0 && i >= 0
1903 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1905 i--;
1906 j--;
1909 /* ??? The innermost op should always be a MEM_REF and we already
1910 checked that the assignment to the lhs kills vr. Thus for
1911 aggregate copies using char[] types the vn_reference_op_eq
1912 may fail when comparing types for compatibility. But we really
1913 don't care here - further lookups with the rewritten operands
1914 will simply fail if we messed up types too badly. */
1915 HOST_WIDE_INT extra_off = 0;
1916 if (j == 0 && i >= 0
1917 && lhs_ops[0].opcode == MEM_REF
1918 && lhs_ops[0].off != -1)
1920 if (lhs_ops[0].off == vr->operands[i].off)
1921 i--, j--;
1922 else if (vr->operands[i].opcode == MEM_REF
1923 && vr->operands[i].off != -1)
1925 extra_off = vr->operands[i].off - lhs_ops[0].off;
1926 i--, j--;
1930 /* i now points to the first additional op.
1931 ??? LHS may not be completely contained in VR, one or more
1932 VIEW_CONVERT_EXPRs could be in its way. We could at least
1933 try handling outermost VIEW_CONVERT_EXPRs. */
1934 if (j != -1)
1935 return (void *)-1;
1937 /* Now re-write REF to be based on the rhs of the assignment. */
1938 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1940 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1941 if (extra_off != 0)
1943 if (rhs.length () < 2
1944 || rhs[0].opcode != MEM_REF
1945 || rhs[0].off == -1)
1946 return (void *)-1;
1947 rhs[0].off += extra_off;
1948 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1949 build_int_cst (TREE_TYPE (rhs[0].op0),
1950 extra_off));
1953 /* We need to pre-pend vr->operands[0..i] to rhs. */
1954 vec<vn_reference_op_s> old = vr->operands;
1955 if (i + 1 + rhs.length () > vr->operands.length ())
1957 vr->operands.safe_grow (i + 1 + rhs.length ());
1958 if (old == shared_lookup_references)
1959 shared_lookup_references = vr->operands;
1961 else
1962 vr->operands.truncate (i + 1 + rhs.length ());
1963 FOR_EACH_VEC_ELT (rhs, j, vro)
1964 vr->operands[i + 1 + j] = *vro;
1965 vr->operands = valueize_refs (vr->operands);
1966 if (old == shared_lookup_references)
1967 shared_lookup_references = vr->operands;
1968 vr->hashcode = vn_reference_compute_hash (vr);
1970 /* Try folding the new reference to a constant. */
1971 tree val = fully_constant_vn_reference_p (vr);
1972 if (val)
1973 return vn_reference_lookup_or_insert_for_pieces
1974 (vuse, vr->set, vr->type, vr->operands, val);
1976 /* Adjust *ref from the new operands. */
1977 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1978 return (void *)-1;
1979 /* This can happen with bitfields. */
1980 if (ref->size != r.size)
1981 return (void *)-1;
1982 *ref = r;
1984 /* Do not update last seen VUSE after translating. */
1985 last_vuse_ptr = NULL;
1987 /* Keep looking for the adjusted *REF / VR pair. */
1988 return NULL;
1991 /* 6) For memcpy copies translate the reference through them if
1992 the copy kills ref. */
1993 else if (vn_walk_kind == VN_WALKREWRITE
1994 && is_gimple_reg_type (vr->type)
1995 /* ??? Handle BCOPY as well. */
1996 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1997 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1998 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1999 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2000 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2001 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2002 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2003 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2005 tree lhs, rhs;
2006 ao_ref r;
2007 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2008 vn_reference_op_s op;
2009 HOST_WIDE_INT at;
2012 /* Only handle non-variable, addressable refs. */
2013 if (ref->size != maxsize
2014 || offset % BITS_PER_UNIT != 0
2015 || ref->size % BITS_PER_UNIT != 0)
2016 return (void *)-1;
2018 /* Extract a pointer base and an offset for the destination. */
2019 lhs = gimple_call_arg (def_stmt, 0);
2020 lhs_offset = 0;
2021 if (TREE_CODE (lhs) == SSA_NAME)
2022 lhs = SSA_VAL (lhs);
2023 if (TREE_CODE (lhs) == ADDR_EXPR)
2025 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2026 &lhs_offset);
2027 if (!tem)
2028 return (void *)-1;
2029 if (TREE_CODE (tem) == MEM_REF
2030 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2032 lhs = TREE_OPERAND (tem, 0);
2033 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2035 else if (DECL_P (tem))
2036 lhs = build_fold_addr_expr (tem);
2037 else
2038 return (void *)-1;
2040 if (TREE_CODE (lhs) != SSA_NAME
2041 && TREE_CODE (lhs) != ADDR_EXPR)
2042 return (void *)-1;
2044 /* Extract a pointer base and an offset for the source. */
2045 rhs = gimple_call_arg (def_stmt, 1);
2046 rhs_offset = 0;
2047 if (TREE_CODE (rhs) == SSA_NAME)
2048 rhs = SSA_VAL (rhs);
2049 if (TREE_CODE (rhs) == ADDR_EXPR)
2051 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2052 &rhs_offset);
2053 if (!tem)
2054 return (void *)-1;
2055 if (TREE_CODE (tem) == MEM_REF
2056 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2058 rhs = TREE_OPERAND (tem, 0);
2059 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2061 else if (DECL_P (tem))
2062 rhs = build_fold_addr_expr (tem);
2063 else
2064 return (void *)-1;
2066 if (TREE_CODE (rhs) != SSA_NAME
2067 && TREE_CODE (rhs) != ADDR_EXPR)
2068 return (void *)-1;
2070 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2072 /* The bases of the destination and the reference have to agree. */
2073 if ((TREE_CODE (base) != MEM_REF
2074 && !DECL_P (base))
2075 || (TREE_CODE (base) == MEM_REF
2076 && (TREE_OPERAND (base, 0) != lhs
2077 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2078 || (DECL_P (base)
2079 && (TREE_CODE (lhs) != ADDR_EXPR
2080 || TREE_OPERAND (lhs, 0) != base)))
2081 return (void *)-1;
2083 /* And the access has to be contained within the memcpy destination. */
2084 at = offset / BITS_PER_UNIT;
2085 if (TREE_CODE (base) == MEM_REF)
2086 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2087 if (lhs_offset > at
2088 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2089 return (void *)-1;
2091 /* Make room for 2 operands in the new reference. */
2092 if (vr->operands.length () < 2)
2094 vec<vn_reference_op_s> old = vr->operands;
2095 vr->operands.safe_grow_cleared (2);
2096 if (old == shared_lookup_references
2097 && vr->operands != old)
2098 shared_lookup_references = vr->operands;
2100 else
2101 vr->operands.truncate (2);
2103 /* The looked-through reference is a simple MEM_REF. */
2104 memset (&op, 0, sizeof (op));
2105 op.type = vr->type;
2106 op.opcode = MEM_REF;
2107 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2108 op.off = at - lhs_offset + rhs_offset;
2109 vr->operands[0] = op;
2110 op.type = TREE_TYPE (rhs);
2111 op.opcode = TREE_CODE (rhs);
2112 op.op0 = rhs;
2113 op.off = -1;
2114 vr->operands[1] = op;
2115 vr->hashcode = vn_reference_compute_hash (vr);
2117 /* Adjust *ref from the new operands. */
2118 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2119 return (void *)-1;
2120 /* This can happen with bitfields. */
2121 if (ref->size != r.size)
2122 return (void *)-1;
2123 *ref = r;
2125 /* Do not update last seen VUSE after translating. */
2126 last_vuse_ptr = NULL;
2128 /* Keep looking for the adjusted *REF / VR pair. */
2129 return NULL;
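/* Illustrative sketch (not from the original source): given

     memcpy (&d, &s, sizeof (d));
     ... = d.f;

   the lookup of d.f is rewritten into a MEM_REF based on &s at the
   corresponding byte offset, so a dominating store to s.f can
   provide the value.  */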
2132 /* Bail out and stop walking. */
2133 return (void *)-1;
2136 /* Lookup a reference operation by its parts, in the current hash table.
2137 Returns the resulting value number if it exists in the hash table,
2138 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2139 vn_reference_t stored in the hashtable if something is found. */
2141 tree
2142 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2143 vec<vn_reference_op_s> operands,
2144 vn_reference_t *vnresult, vn_lookup_kind kind)
2146 struct vn_reference_s vr1;
2147 vn_reference_t tmp;
2148 tree cst;
2150 if (!vnresult)
2151 vnresult = &tmp;
2152 *vnresult = NULL;
2154 vr1.vuse = vuse_ssa_val (vuse);
2155 shared_lookup_references.truncate (0);
2156 shared_lookup_references.safe_grow (operands.length ());
2157 memcpy (shared_lookup_references.address (),
2158 operands.address (),
2159 sizeof (vn_reference_op_s)
2160 * operands.length ());
2161 vr1.operands = operands = shared_lookup_references
2162 = valueize_refs (shared_lookup_references);
2163 vr1.type = type;
2164 vr1.set = set;
2165 vr1.hashcode = vn_reference_compute_hash (&vr1);
2166 if ((cst = fully_constant_vn_reference_p (&vr1)))
2167 return cst;
2169 vn_reference_lookup_1 (&vr1, vnresult);
2170 if (!*vnresult
2171 && kind != VN_NOWALK
2172 && vr1.vuse)
2174 ao_ref r;
2175 vn_walk_kind = kind;
2176 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2177 *vnresult =
2178 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2179 vn_reference_lookup_2,
2180 vn_reference_lookup_3,
2181 vuse_ssa_val, &vr1);
2182 gcc_checking_assert (vr1.operands == shared_lookup_references);
2185 if (*vnresult)
2186 return (*vnresult)->result;
2188 return NULL_TREE;
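/* Typical use (a sketch, not from the original source): passes such
   as PRE build an operand vector for a hypothetical load and call
   vn_reference_lookup_pieces to ask whether that load already has a
   value number, without having to construct a GIMPLE statement.  */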
2191 /* Lookup OP in the current hash table, and return the resulting value
2192 number if it exists in the hash table. Return NULL_TREE if it does
2193 not exist in the hash table or if the result field of the structure
2194 was NULL. VNRESULT will be filled in with the vn_reference_t
2195 stored in the hashtable if one exists. */
2197 tree
2198 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2199 vn_reference_t *vnresult)
2201 vec<vn_reference_op_s> operands;
2202 struct vn_reference_s vr1;
2203 tree cst;
2204 bool valueized_anything;
2206 if (vnresult)
2207 *vnresult = NULL;
2209 vr1.vuse = vuse_ssa_val (vuse);
2210 vr1.operands = operands
2211 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2212 vr1.type = TREE_TYPE (op);
2213 vr1.set = get_alias_set (op);
2214 vr1.hashcode = vn_reference_compute_hash (&vr1);
2215 if ((cst = fully_constant_vn_reference_p (&vr1)))
2216 return cst;
2218 if (kind != VN_NOWALK
2219 && vr1.vuse)
2221 vn_reference_t wvnresult;
2222 ao_ref r;
2223 /* Make sure to use a valueized reference if we valueized anything.
2224 Otherwise preserve the full reference for advanced TBAA. */
2225 if (!valueized_anything
2226 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2227 vr1.operands))
2228 ao_ref_init (&r, op);
2229 vn_walk_kind = kind;
2230 wvnresult =
2231 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2232 vn_reference_lookup_2,
2233 vn_reference_lookup_3,
2234 vuse_ssa_val, &vr1);
2235 gcc_checking_assert (vr1.operands == shared_lookup_references);
2236 if (wvnresult)
2238 if (vnresult)
2239 *vnresult = wvnresult;
2240 return wvnresult->result;
2243 return NULL_TREE;
2246 return vn_reference_lookup_1 (&vr1, vnresult);
2249 /* Lookup CALL in the current hash table and return the entry in
2250 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2252 void
2253 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2254 vn_reference_t vr)
2256 if (vnresult)
2257 *vnresult = NULL;
2259 tree vuse = gimple_vuse (call);
2261 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2262 vr->operands = valueize_shared_reference_ops_from_call (call);
2263 vr->type = gimple_expr_type (call);
2264 vr->set = 0;
2265 vr->hashcode = vn_reference_compute_hash (vr);
2266 vn_reference_lookup_1 (vr, vnresult);
2269 /* Insert OP into the current hash table with a value number of
2270 RESULT, and return the resulting reference structure we created. */
2272 static vn_reference_t
2273 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2275 vn_reference_s **slot;
2276 vn_reference_t vr1;
2277 bool tem;
2279 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2280 if (TREE_CODE (result) == SSA_NAME)
2281 vr1->value_id = VN_INFO (result)->value_id;
2282 else
2283 vr1->value_id = get_or_alloc_constant_value_id (result);
2284 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2285 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2286 vr1->type = TREE_TYPE (op);
2287 vr1->set = get_alias_set (op);
2288 vr1->hashcode = vn_reference_compute_hash (vr1);
2289 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2290 vr1->result_vdef = vdef;
2292 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2293 INSERT);
2295 /* Because we lookup stores using vuses, and value number failures
2296 using the vdefs (see visit_reference_op_store for how and why),
2297 it's possible that on failure we may try to insert an already
2298 inserted store. This is not wrong, there is no ssa name for a
2299 store that we could use as a differentiator anyway. Thus, unlike
2300 the other lookup functions, you cannot gcc_assert (!*slot)
2301 here. */
2303 /* But free the old slot in case of a collision. */
2304 if (*slot)
2305 free_reference (*slot);
2307 *slot = vr1;
2308 return vr1;
2311 /* Insert a reference by its pieces into the current hash table with
2312 a value number of RESULT. Return the resulting reference
2313 structure we created. */
2315 vn_reference_t
2316 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2317 vec<vn_reference_op_s> operands,
2318 tree result, unsigned int value_id)
2321 vn_reference_s **slot;
2322 vn_reference_t vr1;
2324 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2325 vr1->value_id = value_id;
2326 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2327 vr1->operands = valueize_refs (operands);
2328 vr1->type = type;
2329 vr1->set = set;
2330 vr1->hashcode = vn_reference_compute_hash (vr1);
2331 if (result && TREE_CODE (result) == SSA_NAME)
2332 result = SSA_VAL (result);
2333 vr1->result = result;
2335 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2336 INSERT);
2338 /* At this point we should have all the things inserted that we have
2339 seen before, and we should never try inserting something that
2340 already exists. */
2341 gcc_assert (!*slot);
2342 if (*slot)
2343 free_reference (*slot);
2345 *slot = vr1;
2346 return vr1;
2349 /* Compute and return the hash value for nary operation VBO1. */
2351 static hashval_t
2352 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2354 inchash::hash hstate;
2355 unsigned i;
2357 for (i = 0; i < vno1->length; ++i)
2358 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2359 vno1->op[i] = SSA_VAL (vno1->op[i]);
2361 if (vno1->length == 2
2362 && commutative_tree_code (vno1->opcode)
2363 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2365 tree temp = vno1->op[0];
2366 vno1->op[0] = vno1->op[1];
2367 vno1->op[1] = temp;
2370 hstate.add_int (vno1->opcode);
2371 for (i = 0; i < vno1->length; ++i)
2372 inchash::add_expr (vno1->op[i], hstate);
2374 return hstate.end ();
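/* Note that because commutative operands are canonically ordered
   above, e.g. a + b and b + a hash identically and later compare
   equal, so both expressions receive the same value number.  */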
2377 /* Compare nary operations VNO1 and VNO2 and return true if they are
2378 equivalent. */
2380 bool
2381 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2383 unsigned i;
2385 if (vno1->hashcode != vno2->hashcode)
2386 return false;
2388 if (vno1->length != vno2->length)
2389 return false;
2391 if (vno1->opcode != vno2->opcode
2392 || !types_compatible_p (vno1->type, vno2->type))
2393 return false;
2395 for (i = 0; i < vno1->length; ++i)
2396 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2397 return false;
2399 return true;
2402 /* Initialize VNO from the pieces provided. */
2404 static void
2405 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2406 enum tree_code code, tree type, tree *ops)
2408 vno->opcode = code;
2409 vno->length = length;
2410 vno->type = type;
2411 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2414 /* Initialize VNO from OP. */
2416 static void
2417 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2419 unsigned i;
2421 vno->opcode = TREE_CODE (op);
2422 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2423 vno->type = TREE_TYPE (op);
2424 for (i = 0; i < vno->length; ++i)
2425 vno->op[i] = TREE_OPERAND (op, i);
2428 /* Return the number of operands for a vn_nary ops structure from STMT. */
2430 static unsigned int
2431 vn_nary_length_from_stmt (gimple stmt)
2433 switch (gimple_assign_rhs_code (stmt))
2435 case REALPART_EXPR:
2436 case IMAGPART_EXPR:
2437 case VIEW_CONVERT_EXPR:
2438 return 1;
2440 case BIT_FIELD_REF:
2441 return 3;
2443 case CONSTRUCTOR:
2444 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2446 default:
2447 return gimple_num_ops (stmt) - 1;
2451 /* Initialize VNO from STMT. */
2453 static void
2454 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2456 unsigned i;
2458 vno->opcode = gimple_assign_rhs_code (stmt);
2459 vno->type = gimple_expr_type (stmt);
2460 switch (vno->opcode)
2462 case REALPART_EXPR:
2463 case IMAGPART_EXPR:
2464 case VIEW_CONVERT_EXPR:
2465 vno->length = 1;
2466 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2467 break;
2469 case BIT_FIELD_REF:
2470 vno->length = 3;
2471 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2472 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2473 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2474 break;
2476 case CONSTRUCTOR:
2477 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2478 for (i = 0; i < vno->length; ++i)
2479 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2480 break;
2482 default:
2483 gcc_checking_assert (!gimple_assign_single_p (stmt));
2484 vno->length = gimple_num_ops (stmt) - 1;
2485 for (i = 0; i < vno->length; ++i)
2486 vno->op[i] = gimple_op (stmt, i + 1);
2490 /* Compute the hashcode for VNO and look for it in the hash table;
2491 return the resulting value number if it exists in the hash table.
2492 Return NULL_TREE if it does not exist in the hash table or if the
2493 result field of the operation is NULL. VNRESULT will contain the
2494 vn_nary_op_t from the hashtable if it exists. */
2496 static tree
2497 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2499 vn_nary_op_s **slot;
2501 if (vnresult)
2502 *vnresult = NULL;
2504 vno->hashcode = vn_nary_op_compute_hash (vno);
2505 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2506 NO_INSERT);
2507 if (!slot && current_info == optimistic_info)
2508 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2509 NO_INSERT);
2510 if (!slot)
2511 return NULL_TREE;
2512 if (vnresult)
2513 *vnresult = *slot;
2514 return (*slot)->result;
2517 /* Lookup an n-ary operation by its pieces and return the resulting value
2518 number if it exists in the hash table. Return NULL_TREE if it does
2519 not exist in the hash table or if the result field of the operation
2520 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2521 if it exists. */
2523 tree
2524 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2525 tree type, tree *ops, vn_nary_op_t *vnresult)
2527 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2528 sizeof_vn_nary_op (length));
2529 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2530 return vn_nary_op_lookup_1 (vno1, vnresult);
2533 /* Lookup OP in the current hash table, and return the resulting value
2534 number if it exists in the hash table. Return NULL_TREE if it does
2535 not exist in the hash table or if the result field of the operation
2536 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2537 if it exists. */
2539 tree
2540 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2542 vn_nary_op_t vno1
2543 = XALLOCAVAR (struct vn_nary_op_s,
2544 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2545 init_vn_nary_op_from_op (vno1, op);
2546 return vn_nary_op_lookup_1 (vno1, vnresult);
2549 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2550 value number if it exists in the hash table. Return NULL_TREE if
2551 it does not exist in the hash table. VNRESULT will contain the
2552 vn_nary_op_t from the hashtable if it exists. */
2554 tree
2555 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2557 vn_nary_op_t vno1
2558 = XALLOCAVAR (struct vn_nary_op_s,
2559 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2560 init_vn_nary_op_from_stmt (vno1, stmt);
2561 return vn_nary_op_lookup_1 (vno1, vnresult);
2564 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2566 static vn_nary_op_t
2567 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2569 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2572 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2573 obstack. */
2575 static vn_nary_op_t
2576 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2578 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2579 &current_info->nary_obstack);
2581 vno1->value_id = value_id;
2582 vno1->length = length;
2583 vno1->result = result;
2585 return vno1;
2588 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2589 VNO->HASHCODE first. */
2591 static vn_nary_op_t
2592 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2593 bool compute_hash)
2595 vn_nary_op_s **slot;
2597 if (compute_hash)
2598 vno->hashcode = vn_nary_op_compute_hash (vno);
2600 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2601 gcc_assert (!*slot);
2603 *slot = vno;
2604 return vno;
2607 /* Insert an n-ary operation into the current hash table using its
2608 pieces. Return the vn_nary_op_t structure we created and put in
2609 the hashtable. */
2611 vn_nary_op_t
2612 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2613 tree type, tree *ops,
2614 tree result, unsigned int value_id)
2616 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2617 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2618 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2621 /* Insert OP into the current hash table with a value number of
2622 RESULT. Return the vn_nary_op_t structure we created and put in
2623 the hashtable. */
2625 vn_nary_op_t
2626 vn_nary_op_insert (tree op, tree result)
2628 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2629 vn_nary_op_t vno1;
2631 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2632 init_vn_nary_op_from_op (vno1, op);
2633 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2636 /* Insert the rhs of STMT into the current hash table with a value number of
2637 RESULT. */
2639 vn_nary_op_t
2640 vn_nary_op_insert_stmt (gimple stmt, tree result)
2642 vn_nary_op_t vno1
2643 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2644 result, VN_INFO (result)->value_id);
2645 init_vn_nary_op_from_stmt (vno1, stmt);
2646 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2649 /* Compute a hashcode for PHI operation VP1 and return it. */
2651 static inline hashval_t
2652 vn_phi_compute_hash (vn_phi_t vp1)
2654 inchash::hash hstate (vp1->block->index);
2655 int i;
2656 tree phi1op;
2657 tree type;
2659 /* If all PHI arguments are constants we need to distinguish
2660 the PHI node via its type. */
2661 type = vp1->type;
2662 hstate.merge_hash (vn_hash_type (type));
2664 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2666 if (phi1op == VN_TOP)
2667 continue;
2668 inchash::add_expr (phi1op, hstate);
2671 return hstate.end ();
2674 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2676 static int
2677 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2679 if (vp1->hashcode != vp2->hashcode)
2680 return false;
2682 if (vp1->block == vp2->block)
2684 int i;
2685 tree phi1op;
2687 /* If the PHI nodes do not have compatible types
2688 they are not the same. */
2689 if (!types_compatible_p (vp1->type, vp2->type))
2690 return false;
2692 /* Any phi in the same block will have its arguments in the
2693 same edge order, because of how we store phi nodes. */
2694 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2696 tree phi2op = vp2->phiargs[i];
2697 if (phi1op == VN_TOP || phi2op == VN_TOP)
2698 continue;
2699 if (!expressions_equal_p (phi1op, phi2op))
2700 return false;
2702 return true;
2704 return false;
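/* Skipping VN_TOP arguments above is what lets optimistically
   undefined values unify with anything: e.g. PHI <x_1, VN_TOP> and
   PHI <x_1, x_1> in the same block compare equal, since VN_TOP can
   be assumed to take whatever value makes the PHIs agree.  */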
2707 static vec<tree> shared_lookup_phiargs;
2709 /* Lookup PHI in the current hash table, and return the resulting
2710 value number if it exists in the hash table. Return NULL_TREE if
2711 it does not exist in the hash table. */
2713 static tree
2714 vn_phi_lookup (gimple phi)
2716 vn_phi_s **slot;
2717 struct vn_phi_s vp1;
2718 unsigned i;
2720 shared_lookup_phiargs.truncate (0);
2722 /* Canonicalize the SSA_NAMEs to their value numbers. */
2723 for (i = 0; i < gimple_phi_num_args (phi); i++)
2725 tree def = PHI_ARG_DEF (phi, i);
2726 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2727 shared_lookup_phiargs.safe_push (def);
2729 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2730 vp1.phiargs = shared_lookup_phiargs;
2731 vp1.block = gimple_bb (phi);
2732 vp1.hashcode = vn_phi_compute_hash (&vp1);
2733 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2734 NO_INSERT);
2735 if (!slot && current_info == optimistic_info)
2736 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2737 NO_INSERT);
2738 if (!slot)
2739 return NULL_TREE;
2740 return (*slot)->result;
2743 /* Insert PHI into the current hash table with a value number of
2744 RESULT. */
2746 static vn_phi_t
2747 vn_phi_insert (gimple phi, tree result)
2749 vn_phi_s **slot;
2750 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2751 unsigned i;
2752 vec<tree> args = vNULL;
2754 /* Canonicalize the SSA_NAMEs to their value numbers. */
2755 for (i = 0; i < gimple_phi_num_args (phi); i++)
2757 tree def = PHI_ARG_DEF (phi, i);
2758 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2759 args.safe_push (def);
2761 vp1->value_id = VN_INFO (result)->value_id;
2762 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2763 vp1->phiargs = args;
2764 vp1->block = gimple_bb (phi);
2765 vp1->result = result;
2766 vp1->hashcode = vn_phi_compute_hash (vp1);
2768 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2770 /* Because we iterate over phi operations more than once, it's
2771 possible the slot might already exist here, hence no assert. */
2772 *slot = vp1;
2773 return vp1;
2777 /* Print set of components in strongly connected component SCC to OUT. */
2779 static void
2780 print_scc (FILE *out, vec<tree> scc)
2782 tree var;
2783 unsigned int i;
2785 fprintf (out, "SCC consists of:");
2786 FOR_EACH_VEC_ELT (scc, i, var)
2788 fprintf (out, " ");
2789 print_generic_expr (out, var, 0);
2791 fprintf (out, "\n");
2794 /* Set the value number of FROM to TO, return true if it has changed
2795 as a result. */
2797 static inline bool
2798 set_ssa_val_to (tree from, tree to)
2800 tree currval = SSA_VAL (from);
2801 HOST_WIDE_INT toff, coff;
2803 /* The only thing we allow as value numbers are ssa_names
2804 and invariants. So assert that here. We don't allow VN_TOP
2805 as visiting a stmt should produce a value-number other than
2806 that.
2807 ??? Still VN_TOP can happen for unreachable code, so force
2808 it to varying in that case. Not all code is prepared to
2809 get VN_TOP on valueization. */
2810 if (to == VN_TOP)
2812 if (dump_file && (dump_flags & TDF_DETAILS))
2813 fprintf (dump_file, "Forcing value number to varying on "
2814 "receiving VN_TOP\n");
2815 to = from;
2818 gcc_assert (to != NULL_TREE
2819 && ((TREE_CODE (to) == SSA_NAME
2820 && (to == from || SSA_VAL (to) == to))
2821 || is_gimple_min_invariant (to)));
2823 if (from != to)
2825 if (currval == from)
2827 if (dump_file && (dump_flags & TDF_DETAILS))
2829 fprintf (dump_file, "Not changing value number of ");
2830 print_generic_expr (dump_file, from, 0);
2831 fprintf (dump_file, " from VARYING to ");
2832 print_generic_expr (dump_file, to, 0);
2833 fprintf (dump_file, "\n");
2835 return false;
2837 else if (TREE_CODE (to) == SSA_NAME
2838 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2839 to = from;
2842 if (dump_file && (dump_flags & TDF_DETAILS))
2844 fprintf (dump_file, "Setting value number of ");
2845 print_generic_expr (dump_file, from, 0);
2846 fprintf (dump_file, " to ");
2847 print_generic_expr (dump_file, to, 0);
2850 if (currval != to
2851 && !operand_equal_p (currval, to, 0)
2852 /* ??? For addresses involving volatile objects or types operand_equal_p
2853 does not reliably detect ADDR_EXPRs as equal. We know we are only
2854 getting invariant gimple addresses here, so can use
2855 get_addr_base_and_unit_offset to do this comparison. */
2856 && !(TREE_CODE (currval) == ADDR_EXPR
2857 && TREE_CODE (to) == ADDR_EXPR
2858 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2859 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2860 && coff == toff))
2862 VN_INFO (from)->valnum = to;
2863 if (dump_file && (dump_flags & TDF_DETAILS))
2864 fprintf (dump_file, " (changed)\n");
2865 return true;
2867 if (dump_file && (dump_flags & TDF_DETAILS))
2868 fprintf (dump_file, "\n");
2869 return false;
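/* The ADDR_EXPR special case above matters for convergence: if two
   actually-equal addresses were reported as "changed" on every visit
   because operand_equal_p cannot prove them equal, the SCC iteration
   would presumably never reach a fixed point.  */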
2872 /* Mark as processed all the definitions in the defining stmt of USE, or
2873 the USE itself. */
2875 static void
2876 mark_use_processed (tree use)
2878 ssa_op_iter iter;
2879 def_operand_p defp;
2880 gimple stmt = SSA_NAME_DEF_STMT (use);
2882 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2884 VN_INFO (use)->use_processed = true;
2885 return;
2888 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2890 tree def = DEF_FROM_PTR (defp);
2892 VN_INFO (def)->use_processed = true;
2896 /* Set all definitions in STMT to value number to themselves.
2897 Return true if a value number changed. */
2899 static bool
2900 defs_to_varying (gimple stmt)
2902 bool changed = false;
2903 ssa_op_iter iter;
2904 def_operand_p defp;
2906 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2908 tree def = DEF_FROM_PTR (defp);
2909 changed |= set_ssa_val_to (def, def);
2911 return changed;
2914 static bool expr_has_constants (tree expr);
2916 /* Visit a copy between LHS and RHS, return true if the value number
2917 changed. */
2919 static bool
2920 visit_copy (tree lhs, tree rhs)
2922 /* The copy may have a more interesting constant-filled expression
2923 (we don't, since we know our RHS is just an SSA name). */
2924 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2925 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2927 /* And finally valueize. */
2928 rhs = SSA_VAL (rhs);
2930 return set_ssa_val_to (lhs, rhs);
2933 /* Visit a nary operator RHS, value number it, and return true if the
2934 value number of LHS has changed as a result. */
2936 static bool
2937 visit_nary_op (tree lhs, gimple stmt)
2939 bool changed = false;
2940 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2942 if (result)
2943 changed = set_ssa_val_to (lhs, result);
2944 else
2946 changed = set_ssa_val_to (lhs, lhs);
2947 vn_nary_op_insert_stmt (stmt, lhs);
2950 return changed;
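/* This lookup-or-insert pattern -- on a miss, value number the LHS
   to itself and record the expression -- is the common scheme shared
   by the visit_* routines below.  */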
2953 /* Visit a call STMT storing into LHS. Return true if the value number
2954 of the LHS has changed as a result. */
2956 static bool
2957 visit_reference_op_call (tree lhs, gcall *stmt)
2959 bool changed = false;
2960 struct vn_reference_s vr1;
2961 vn_reference_t vnresult = NULL;
2962 tree vdef = gimple_vdef (stmt);
2964 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2965 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2966 lhs = NULL_TREE;
2968 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2969 if (vnresult)
2971 if (vnresult->result_vdef && vdef)
2972 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2974 if (!vnresult->result && lhs)
2975 vnresult->result = lhs;
2977 if (vnresult->result && lhs)
2979 changed |= set_ssa_val_to (lhs, vnresult->result);
2981 if (VN_INFO (vnresult->result)->has_constants)
2982 VN_INFO (lhs)->has_constants = true;
2985 else
2987 vn_reference_t vr2;
2988 vn_reference_s **slot;
2989 if (vdef)
2990 changed |= set_ssa_val_to (vdef, vdef);
2991 if (lhs)
2992 changed |= set_ssa_val_to (lhs, lhs);
2993 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2994 vr2->vuse = vr1.vuse;
2995 /* As we are not walking the virtual operand chain we know the
2996 shared_lookup_references are still original so we can re-use
2997 them here. */
2998 vr2->operands = vr1.operands.copy ();
2999 vr2->type = vr1.type;
3000 vr2->set = vr1.set;
3001 vr2->hashcode = vr1.hashcode;
3002 vr2->result = lhs;
3003 vr2->result_vdef = vdef;
3004 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3005 INSERT);
3006 gcc_assert (!*slot);
3007 *slot = vr2;
3010 return changed;
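/* Note that calls only reach this function for pure/const calls, or
   (when called from PRE with its tail-merging support) for calls
   whose vdefs prove they are not subsequent; see the dispatch in
   visit_use below.  */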
3013 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3014 and return true if the value number of the LHS has changed as a result. */
3016 static bool
3017 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3019 bool changed = false;
3020 tree last_vuse;
3021 tree result;
3023 last_vuse = gimple_vuse (stmt);
3024 last_vuse_ptr = &last_vuse;
3025 result = vn_reference_lookup (op, gimple_vuse (stmt),
3026 default_vn_walk_kind, NULL);
3027 last_vuse_ptr = NULL;
3029 /* We handle type-punning through unions by value-numbering based
3030 on offset and size of the access. Be prepared to handle a
3031 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3032 if (result
3033 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3035 /* We will be setting the value number of lhs to the value number
3036 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3037 So first simplify and lookup this expression to see if it
3038 is already available. */
3039 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3040 if ((CONVERT_EXPR_P (val)
3041 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3042 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3044 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3045 if ((CONVERT_EXPR_P (tem)
3046 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3047 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3048 TREE_TYPE (val), tem)))
3049 val = tem;
3051 result = val;
3052 if (!is_gimple_min_invariant (val)
3053 && TREE_CODE (val) != SSA_NAME)
3054 result = vn_nary_op_lookup (val, NULL);
3055 /* If the expression is not yet available, value-number lhs to
3056 a new SSA_NAME we create. */
3057 if (!result)
3059 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3060 "vntemp");
3061 /* Initialize value-number information properly. */
3062 VN_INFO_GET (result)->valnum = result;
3063 VN_INFO (result)->value_id = get_next_value_id ();
3064 VN_INFO (result)->expr = val;
3065 VN_INFO (result)->has_constants = expr_has_constants (val);
3066 VN_INFO (result)->needs_insertion = true;
3067 /* As all "inserted" statements are singleton SCCs, insert
3068 to the valid table. This is strictly needed to
3069 avoid re-generating new value SSA_NAMEs for the same
3070 expression during SCC iteration over and over (the
3071 optimistic table gets cleared after each iteration).
3072 We do not need to insert into the optimistic table, as
3073 lookups there will fall back to the valid table. */
3074 if (current_info == optimistic_info)
3076 current_info = valid_info;
3077 vn_nary_op_insert (val, result);
3078 current_info = optimistic_info;
3080 else
3081 vn_nary_op_insert (val, result);
3082 if (dump_file && (dump_flags & TDF_DETAILS))
3084 fprintf (dump_file, "Inserting name ");
3085 print_generic_expr (dump_file, result, 0);
3086 fprintf (dump_file, " for expression ");
3087 print_generic_expr (dump_file, val, 0);
3088 fprintf (dump_file, "\n");
3093 if (result)
3095 changed = set_ssa_val_to (lhs, result);
3096 if (TREE_CODE (result) == SSA_NAME
3097 && VN_INFO (result)->has_constants)
3099 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3100 VN_INFO (lhs)->has_constants = true;
3103 else
3105 changed = set_ssa_val_to (lhs, lhs);
3106 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3109 return changed;
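/* Illustrative example (a sketch, not from the original source) for
   the type-punning path above: given

     union { int i; float f; } u;
     u.f = x_1;
     ... = u.i;

   the load of u.i reuses the stored value as
   VIEW_CONVERT_EXPR <int> (x_1), materialized as a fresh "vntemp"
   SSA name if no equivalent expression is available yet.  */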
3113 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3114 and return true if the value number of the LHS has changed as a result. */
3116 static bool
3117 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3119 bool changed = false;
3120 vn_reference_t vnresult = NULL;
3121 tree result, assign;
3122 bool resultsame = false;
3123 tree vuse = gimple_vuse (stmt);
3124 tree vdef = gimple_vdef (stmt);
3126 if (TREE_CODE (op) == SSA_NAME)
3127 op = SSA_VAL (op);
3129 /* First we want to lookup using the *vuses* from the store and see
3130 if there the last store to this location with the same address
3131 had the same value.
3133 The vuses represent the memory state before the store. If the
3134 memory state, address, and value of the store are the same as those
3135 of the last store to this location, then this store will produce the
3136 same memory state as that store.
3138 In this case the vdef versions for this store are value numbered to those
3139 vuse versions, since they represent the same memory state after
3140 this store.
3142 Otherwise, the vdefs for the store are used when inserting into
3143 the table, since the store generates a new memory state. */
3145 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3147 if (result)
3149 if (TREE_CODE (result) == SSA_NAME)
3150 result = SSA_VAL (result);
3151 resultsame = expressions_equal_p (result, op);
3154 if ((!result || !resultsame)
3155 /* Only perform the following when being called from PRE
3156 which embeds tail merging. */
3157 && default_vn_walk_kind == VN_WALK)
3159 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3160 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3161 if (vnresult)
3163 VN_INFO (vdef)->use_processed = true;
3164 return set_ssa_val_to (vdef, vnresult->result_vdef);
3168 if (!result || !resultsame)
3170 if (dump_file && (dump_flags & TDF_DETAILS))
3172 fprintf (dump_file, "No store match\n");
3173 fprintf (dump_file, "Value numbering store ");
3174 print_generic_expr (dump_file, lhs, 0);
3175 fprintf (dump_file, " to ");
3176 print_generic_expr (dump_file, op, 0);
3177 fprintf (dump_file, "\n");
3179 /* Have to set value numbers before insert, since insert is
3180 going to valueize the references in-place. */
3181 if (vdef)
3183 changed |= set_ssa_val_to (vdef, vdef);
3186 /* Do not insert structure copies into the tables. */
3187 if (is_gimple_min_invariant (op)
3188 || is_gimple_reg (op))
3189 vn_reference_insert (lhs, op, vdef, NULL);
3191 /* Only perform the following when being called from PRE
3192 which embeds tail merging. */
3193 if (default_vn_walk_kind == VN_WALK)
3195 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3196 vn_reference_insert (assign, lhs, vuse, vdef);
3199 else
3201 /* We had a match, so value number the vdef to have the value
3202 number of the vuse it came from. */
3204 if (dump_file && (dump_flags & TDF_DETAILS))
3205 fprintf (dump_file, "Store matched earlier value, "
3206 "value numbering store vdefs to matching vuses.\n");
3208 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3211 return changed;
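/* Illustrative example (not from the original source): given

     *p_1 = x_2;
     ...            <- no intervening store clobbering *p_1
     *p_1 = x_2;

   the second store matches the earlier value, so its VDEF receives
   the value number of its VUSE; later passes can then treat the
   store as redundant.  */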
3214 /* Visit and value number PHI, return true if the value number
3215 changed. */
3217 static bool
3218 visit_phi (gimple phi)
3220 bool changed = false;
3221 tree result;
3222 tree sameval = VN_TOP;
3223 bool allsame = true;
3225 /* TODO: We could check for this in init_sccvn, and replace this
3226 with a gcc_assert. */
3227 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3228 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3230 /* See if all non-TOP arguments have the same value. TOP is
3231 equivalent to everything, so we can ignore it. */
3232 edge_iterator ei;
3233 edge e;
3234 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3235 if (e->flags & EDGE_EXECUTABLE)
3237 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3239 if (TREE_CODE (def) == SSA_NAME)
3240 def = SSA_VAL (def);
3241 if (def == VN_TOP)
3242 continue;
3243 if (sameval == VN_TOP)
3245 sameval = def;
3247 else
3249 if (!expressions_equal_p (def, sameval))
3251 allsame = false;
3252 break;
3257 /* If all value numbered to the same value, the phi node has that
3258 value. */
3259 if (allsame)
3260 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3262 /* Otherwise, see if it is equivalent to a phi node in this block. */
3263 result = vn_phi_lookup (phi);
3264 if (result)
3265 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3266 else
3268 vn_phi_insert (phi, PHI_RESULT (phi));
3269 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3270 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3271 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3274 return changed;
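/* Illustrative examples (not from the original source):
   PHI <a_1, a_1> over executable edges gets the value number of a_1,
   and so does PHI <a_1, VN_TOP>, because VN_TOP is equivalent to
   everything.  PHI <a_1, b_2> with differing values falls back to
   the equivalent-PHI lookup above.  */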
3277 /* Return true if EXPR contains constants. */
3279 static bool
3280 expr_has_constants (tree expr)
3282 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3284 case tcc_unary:
3285 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3287 case tcc_binary:
3288 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3289 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3290 /* Constants inside reference ops are rarely interesting, but
3291 it can take a lot of looking to find them. */
3292 case tcc_reference:
3293 case tcc_declaration:
3294 return false;
3295 default:
3296 return is_gimple_min_invariant (expr);
3298 return false;
3301 /* Return true if STMT contains constants. */
3303 static bool
3304 stmt_has_constants (gimple stmt)
3306 tree tem;
3308 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3309 return false;
3311 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3313 case GIMPLE_TERNARY_RHS:
3314 tem = gimple_assign_rhs3 (stmt);
3315 if (TREE_CODE (tem) == SSA_NAME)
3316 tem = SSA_VAL (tem);
3317 if (is_gimple_min_invariant (tem))
3318 return true;
3319 /* Fallthru. */
3321 case GIMPLE_BINARY_RHS:
3322 tem = gimple_assign_rhs2 (stmt);
3323 if (TREE_CODE (tem) == SSA_NAME)
3324 tem = SSA_VAL (tem);
3325 if (is_gimple_min_invariant (tem))
3326 return true;
3327 /* Fallthru. */
3329 case GIMPLE_SINGLE_RHS:
3330 /* Constants inside reference ops are rarely interesting, but
3331 it can take a lot of looking to find them. */
3332 case GIMPLE_UNARY_RHS:
3333 tem = gimple_assign_rhs1 (stmt);
3334 if (TREE_CODE (tem) == SSA_NAME)
3335 tem = SSA_VAL (tem);
3336 return is_gimple_min_invariant (tem);
3338 default:
3339 gcc_unreachable ();
3341 return false;
3344 /* Simplify the binary expression RHS, and return the result if
3345 simplified. */
3347 static tree
3348 simplify_binary_expression (gimple stmt)
3350 tree result = NULL_TREE;
3351 tree op0 = gimple_assign_rhs1 (stmt);
3352 tree op1 = gimple_assign_rhs2 (stmt);
3353 enum tree_code code = gimple_assign_rhs_code (stmt);
3355 /* This will not catch every single case we could combine, but will
3356 catch those with constants. The goal here is to combine
3357 constants between expressions while avoiding infinite
3358 expansion of expressions during simplification. */
3359 op0 = vn_valueize (op0);
3360 if (TREE_CODE (op0) == SSA_NAME
3361 && (VN_INFO (op0)->has_constants
3362 || TREE_CODE_CLASS (code) == tcc_comparison
3363 || code == COMPLEX_EXPR))
3364 op0 = vn_get_expr_for (op0);
3366 op1 = vn_valueize (op1);
3367 if (TREE_CODE (op1) == SSA_NAME
3368 && (VN_INFO (op1)->has_constants
3369 || code == COMPLEX_EXPR))
3370 op1 = vn_get_expr_for (op1);
3372 /* Pointer plus constant can be represented as invariant address.
3373 Do so to allow further propagation, see also tree forwprop. */
3374 if (code == POINTER_PLUS_EXPR
3375 && tree_fits_uhwi_p (op1)
3376 && TREE_CODE (op0) == ADDR_EXPR
3377 && is_gimple_min_invariant (op0))
3378 return build_invariant_address (TREE_TYPE (op0),
3379 TREE_OPERAND (op0, 0),
3380 tree_to_uhwi (op1));
3382 /* Avoid folding if nothing changed. */
3383 if (op0 == gimple_assign_rhs1 (stmt)
3384 && op1 == gimple_assign_rhs2 (stmt))
3385 return NULL_TREE;
3387 fold_defer_overflow_warnings ();
3389 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3390 if (result)
3391 STRIP_USELESS_TYPE_CONVERSION (result);
3393 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3394 stmt, 0);
3396 /* Make sure result is not a complex expression consisting
3397 of operators of operators (i.e. (a + b) + (a + c)).
3398 Otherwise, we will end up with unbounded expressions if
3399 fold does anything at all. */
3400 if (result && valid_gimple_rhs_p (result))
3401 return result;
3403 return NULL_TREE;
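/* For example (a sketch, not from the original source), for
   p_1 = &a + 8 with invariant &a the code above re-expresses the
   result as the invariant address &MEM[(void *)&a + 8], which
   further propagation (see tree forwprop) can fold into
   dereferences of p_1.  */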
3406 /* Simplify the unary expression RHS, and return the result if
3407 simplified. */
3409 static tree
3410 simplify_unary_expression (gassign *stmt)
3412 tree result = NULL_TREE;
3413 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3414 enum tree_code code = gimple_assign_rhs_code (stmt);
3416 /* We handle some tcc_reference codes here that are all
3417 GIMPLE_ASSIGN_SINGLE codes. */
3418 if (code == REALPART_EXPR
3419 || code == IMAGPART_EXPR
3420 || code == VIEW_CONVERT_EXPR
3421 || code == BIT_FIELD_REF)
3422 op0 = TREE_OPERAND (op0, 0);
3424 orig_op0 = op0;
3425 op0 = vn_valueize (op0);
3426 if (TREE_CODE (op0) == SSA_NAME)
3428 if (VN_INFO (op0)->has_constants)
3429 op0 = vn_get_expr_for (op0);
3430 else if (CONVERT_EXPR_CODE_P (code)
3431 || code == REALPART_EXPR
3432 || code == IMAGPART_EXPR
3433 || code == VIEW_CONVERT_EXPR
3434 || code == BIT_FIELD_REF)
3436 /* We want to do tree-combining on conversion-like expressions.
3437 Make sure we feed only SSA_NAMEs or constants to fold though. */
3438 tree tem = vn_get_expr_for (op0);
3439 if (UNARY_CLASS_P (tem)
3440 || BINARY_CLASS_P (tem)
3441 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3442 || TREE_CODE (tem) == SSA_NAME
3443 || TREE_CODE (tem) == CONSTRUCTOR
3444 || is_gimple_min_invariant (tem))
3445 op0 = tem;
3449 /* Avoid folding if nothing changed, but remember the expression. */
3450 if (op0 == orig_op0)
3451 return NULL_TREE;
3453 if (code == BIT_FIELD_REF)
3455 tree rhs = gimple_assign_rhs1 (stmt);
3456 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3457 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3459 else
3460 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3461 if (result)
3463 STRIP_USELESS_TYPE_CONVERSION (result);
3464 if (valid_gimple_rhs_p (result))
3465 return result;
3468 return NULL_TREE;
3471 /* Try to simplify RHS using equivalences and constant folding. */
3473 static tree
3474 try_to_simplify (gassign *stmt)
3476 enum tree_code code = gimple_assign_rhs_code (stmt);
3477 tree tem;
3479 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3480 in this case, there is no point in doing extra work. */
3481 if (code == SSA_NAME)
3482 return NULL_TREE;
3484 /* First try constant folding based on our current lattice. */
3485 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3486 if (tem
3487 && (TREE_CODE (tem) == SSA_NAME
3488 || is_gimple_min_invariant (tem)))
3489 return tem;
3491 /* If that didn't work try combining multiple statements. */
3492 switch (TREE_CODE_CLASS (code))
3494 case tcc_reference:
3495 /* Fallthrough for some unary codes that can operate on registers. */
3496 if (!(code == REALPART_EXPR
3497 || code == IMAGPART_EXPR
3498 || code == VIEW_CONVERT_EXPR
3499 || code == BIT_FIELD_REF))
3500 break;
3501 /* We could do a little more with unary ops, if they expand
3502 into binary ops, but it's debatable whether it is worth it. */
3503 case tcc_unary:
3504 return simplify_unary_expression (stmt);
3506 case tcc_comparison:
3507 case tcc_binary:
3508 return simplify_binary_expression (stmt);
3510 default:
3511 break;
3514 return NULL_TREE;
3517 /* Visit and value number USE, return true if the value number
3518 changed. */
3520 static bool
3521 visit_use (tree use)
3523 bool changed = false;
3524 gimple stmt = SSA_NAME_DEF_STMT (use);
3526 mark_use_processed (use);
3528 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3529 if (dump_file && (dump_flags & TDF_DETAILS)
3530 && !SSA_NAME_IS_DEFAULT_DEF (use))
3532 fprintf (dump_file, "Value numbering ");
3533 print_generic_expr (dump_file, use, 0);
3534 fprintf (dump_file, " stmt = ");
3535 print_gimple_stmt (dump_file, stmt, 0, 0);
3538 /* Handle uninitialized uses. */
3539 if (SSA_NAME_IS_DEFAULT_DEF (use))
3540 changed = set_ssa_val_to (use, use);
3541 else
3543 if (gimple_code (stmt) == GIMPLE_PHI)
3544 changed = visit_phi (stmt);
3545 else if (gimple_has_volatile_ops (stmt))
3546 changed = defs_to_varying (stmt);
3547 else if (is_gimple_assign (stmt))
3549 enum tree_code code = gimple_assign_rhs_code (stmt);
3550 tree lhs = gimple_assign_lhs (stmt);
3551 tree rhs1 = gimple_assign_rhs1 (stmt);
3552 tree simplified;
3554 /* Shortcut for copies. Simplifying copies is pointless,
3555 since we copy the expression and value they represent. */
3556 if (code == SSA_NAME
3557 && TREE_CODE (lhs) == SSA_NAME)
3559 changed = visit_copy (lhs, rhs1);
3560 goto done;
3562 simplified = try_to_simplify (as_a <gassign *> (stmt));
3563 if (simplified)
3565 if (dump_file && (dump_flags & TDF_DETAILS))
3567 fprintf (dump_file, "RHS ");
3568 print_gimple_expr (dump_file, stmt, 0, 0);
3569 fprintf (dump_file, " simplified to ");
3570 print_generic_expr (dump_file, simplified, 0);
3571 if (TREE_CODE (lhs) == SSA_NAME)
3572 fprintf (dump_file, " has constants %d\n",
3573 expr_has_constants (simplified));
3574 else
3575 fprintf (dump_file, "\n");
3578 /* Setting value numbers to constants will occasionally
3579 screw up phi congruence because constants are not
3580 uniquely associated with a single ssa name that can be
3581 looked up. */
3582 if (simplified
3583 && is_gimple_min_invariant (simplified)
3584 && TREE_CODE (lhs) == SSA_NAME)
3586 VN_INFO (lhs)->expr = simplified;
3587 VN_INFO (lhs)->has_constants = true;
3588 changed = set_ssa_val_to (lhs, simplified);
3589 goto done;
3591 else if (simplified
3592 && TREE_CODE (simplified) == SSA_NAME
3593 && TREE_CODE (lhs) == SSA_NAME)
3595 changed = visit_copy (lhs, simplified);
3596 goto done;
3598 else if (simplified)
3600 if (TREE_CODE (lhs) == SSA_NAME)
3602 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3603 /* We have to unshare the expression or else
3604 valueizing may change the IL stream. */
3605 VN_INFO (lhs)->expr = unshare_expr (simplified);
3608 else if (stmt_has_constants (stmt)
3609 && TREE_CODE (lhs) == SSA_NAME)
3610 VN_INFO (lhs)->has_constants = true;
3611 else if (TREE_CODE (lhs) == SSA_NAME)
3613 /* We reset expr and constantness here because we may
3614 have been value numbering optimistically, and
3615 iterating. They may become non-constant in this case,
3616 even if they were optimistically constant. */
3618 VN_INFO (lhs)->has_constants = false;
3619 VN_INFO (lhs)->expr = NULL_TREE;
3622 if ((TREE_CODE (lhs) == SSA_NAME
3623 /* We can substitute SSA_NAMEs that are live over
3624 abnormal edges with their constant value. */
3625 && !(gimple_assign_copy_p (stmt)
3626 && is_gimple_min_invariant (rhs1))
3627 && !(simplified
3628 && is_gimple_min_invariant (simplified))
3629 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3630 /* Stores or copies from SSA_NAMEs that are live over
3631 abnormal edges are a problem. */
3632 || (code == SSA_NAME
3633 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3634 changed = defs_to_varying (stmt);
3635 else if (REFERENCE_CLASS_P (lhs)
3636 || DECL_P (lhs))
3637 changed = visit_reference_op_store (lhs, rhs1, stmt);
3638 else if (TREE_CODE (lhs) == SSA_NAME)
3640 if ((gimple_assign_copy_p (stmt)
3641 && is_gimple_min_invariant (rhs1))
3642 || (simplified
3643 && is_gimple_min_invariant (simplified)))
3645 VN_INFO (lhs)->has_constants = true;
3646 if (simplified)
3647 changed = set_ssa_val_to (lhs, simplified);
3648 else
3649 changed = set_ssa_val_to (lhs, rhs1);
3651 else
3653 /* First try to lookup the simplified expression. */
3654 if (simplified)
3656 enum gimple_rhs_class rhs_class;
3659 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3660 if ((rhs_class == GIMPLE_UNARY_RHS
3661 || rhs_class == GIMPLE_BINARY_RHS
3662 || rhs_class == GIMPLE_TERNARY_RHS)
3663 && valid_gimple_rhs_p (simplified))
3665 tree result = vn_nary_op_lookup (simplified, NULL);
3666 if (result)
3668 changed = set_ssa_val_to (lhs, result);
3669 goto done;
3674 /* Otherwise visit the original statement. */
3675 switch (vn_get_stmt_kind (stmt))
3677 case VN_NARY:
3678 changed = visit_nary_op (lhs, stmt);
3679 break;
3680 case VN_REFERENCE:
3681 changed = visit_reference_op_load (lhs, rhs1, stmt);
3682 break;
3683 default:
3684 changed = defs_to_varying (stmt);
3685 break;
3689 else
3690 changed = defs_to_varying (stmt);
3692 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3694 tree lhs = gimple_call_lhs (stmt);
3695 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3697 /* Try constant folding based on our current lattice. */
3698 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3699 vn_valueize);
3700 if (simplified)
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3704 fprintf (dump_file, "call ");
3705 print_gimple_expr (dump_file, stmt, 0, 0);
3706 fprintf (dump_file, " simplified to ");
3707 print_generic_expr (dump_file, simplified, 0);
3708 if (TREE_CODE (lhs) == SSA_NAME)
3709 fprintf (dump_file, " has constants %d\n",
3710 expr_has_constants (simplified));
3711 else
3712 fprintf (dump_file, "\n");
3715 /* Setting value numbers to constants will occasionally
3716 screw up phi congruence because constants are not
3717 uniquely associated with a single ssa name that can be
3718 looked up. */
3719 if (simplified
3720 && is_gimple_min_invariant (simplified))
3722 VN_INFO (lhs)->expr = simplified;
3723 VN_INFO (lhs)->has_constants = true;
3724 changed = set_ssa_val_to (lhs, simplified);
3725 if (gimple_vdef (stmt))
3726 changed |= set_ssa_val_to (gimple_vdef (stmt),
3727 SSA_VAL (gimple_vuse (stmt)));
3728 goto done;
3730 else if (simplified
3731 && TREE_CODE (simplified) == SSA_NAME)
3733 changed = visit_copy (lhs, simplified);
3734 if (gimple_vdef (stmt))
3735 changed |= set_ssa_val_to (gimple_vdef (stmt),
3736 SSA_VAL (gimple_vuse (stmt)));
3737 goto done;
3739 else
3741 if (stmt_has_constants (stmt))
3742 VN_INFO (lhs)->has_constants = true;
3743 else
3745 /* We reset expr and constantness here because we may
3746 have been value numbering optimistically, and
3747 iterating. They may become non-constant in this case,
3748 even if they were optimistically constant. */
3749 VN_INFO (lhs)->has_constants = false;
3750 VN_INFO (lhs)->expr = NULL_TREE;
3753 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3755 changed = defs_to_varying (stmt);
3756 goto done;
3761 if (!gimple_call_internal_p (stmt)
3762 && (/* Calls to the same function with the same vuse
3763 and the same operands do not necessarily return the same
3764 value, unless they're pure or const. */
3765 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3766 /* If calls have a vdef, subsequent calls won't have
3767 the same incoming vuse. So, if two calls with vdefs have the
3768 same vuse, we know they're not subsequent.
3769 We can therefore give two non-subsequent calls to the same
3770 function with the same vuse and the same operands the
3771 same value number, because there is no code in the program
3772 that can compare the two values... */
3773 || (gimple_vdef (stmt)
3774 /* ... unless the call returns a pointer which does
3775 not alias with anything else, in which case the fact
3776 that the values are distinct is encoded
3777 in the IL. */
3778 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3779 /* Only perform the following when being called from PRE
3780 which embeds tail merging. */
3781 && default_vn_walk_kind == VN_WALK)))
3782 changed = visit_reference_op_call (lhs, call_stmt);
3783 else
3784 changed = defs_to_varying (stmt);
3786 else
3787 changed = defs_to_varying (stmt);
3789 done:
3790 return changed;
3793 /* Compare two operands by reverse postorder index */
3795 static int
3796 compare_ops (const void *pa, const void *pb)
3798 const tree opa = *((const tree *)pa);
3799 const tree opb = *((const tree *)pb);
3800 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3801 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3802 basic_block bba;
3803 basic_block bbb;
3805 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3806 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3807 else if (gimple_nop_p (opstmta))
3808 return -1;
3809 else if (gimple_nop_p (opstmtb))
3810 return 1;
3812 bba = gimple_bb (opstmta);
3813 bbb = gimple_bb (opstmtb);
3815 if (!bba && !bbb)
3816 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3817 else if (!bba)
3818 return -1;
3819 else if (!bbb)
3820 return 1;
3822 if (bba == bbb)
3824 if (gimple_code (opstmta) == GIMPLE_PHI
3825 && gimple_code (opstmtb) == GIMPLE_PHI)
3826 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3827 else if (gimple_code (opstmta) == GIMPLE_PHI)
3828 return -1;
3829 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3830 return 1;
3831 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3832 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3833 else
3834 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3836 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3839 /* Sort an array containing members of a strongly connected component
3840 SCC so that the members are ordered by RPO number.
3841 This means that when the sort is complete, iterating through the
3842 array will give you the members in RPO order. */
3844 static void
3845 sort_scc (vec<tree> scc)
3847 scc.qsort (compare_ops);
3850 /* Insert the no longer used nary ONARY to the hash INFO. */
3852 static void
3853 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3855 size_t size = sizeof_vn_nary_op (onary->length);
3856 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3857 &info->nary_obstack);
3858 memcpy (nary, onary, size);
3859 vn_nary_op_insert_into (nary, info->nary, false);
3862 /* Insert the no longer used phi OPHI to the hash INFO. */
3864 static void
3865 copy_phi (vn_phi_t ophi, vn_tables_t info)
3867 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3868 vn_phi_s **slot;
3869 memcpy (phi, ophi, sizeof (*phi));
3870 ophi->phiargs.create (0);
3871 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3872 gcc_assert (!*slot);
3873 *slot = phi;
3876 /* Insert the no longer used reference OREF to the hash INFO. */
3878 static void
3879 copy_reference (vn_reference_t oref, vn_tables_t info)
3881 vn_reference_t ref;
3882 vn_reference_s **slot;
3883 ref = (vn_reference_t) pool_alloc (info->references_pool);
3884 memcpy (ref, oref, sizeof (*ref));
3885 oref->operands.create (0);
3886 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3887 if (*slot)
3888 free_reference (*slot);
3889 *slot = ref;

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
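
/* To make the optimistic iteration above concrete: for the SCC
   { i_1 = PHI <0, i_2>;  i_2 = i_1 + 0; } every member starts at
   VN_TOP and the loop re-visits the members until nothing changes.
   A minimal self-contained sketch of that fixpoint scheme over a toy
   three-element lattice (hypothetical, much simpler than the real VN
   lattice):

     #include <cstdio>

     enum val { TOP, ZERO, VARYING };

     int main ()
     {
       val i1 = TOP, i2 = TOP;
       bool changed = true;
       int iterations = 0;
       while (changed)
	 {
	   changed = false;
	   iterations++;
	   // i_1 = PHI <0, i_2>: TOP arguments are ignored
	   // optimistically, so 0 meets TOP or ZERO to ZERO.
	   val n1 = (i2 == TOP || i2 == ZERO) ? ZERO : VARYING;
	   // i_2 = i_1 + 0 values to whatever i_1 values to.
	   val n2 = n1;
	   if (n1 != i1 || n2 != i2)
	     {
	       i1 = n1;
	       i2 = n2;
	       changed = true;
	     }
	 }
       std::printf ("converged after %d iterations\n", iterations);
     }
*/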

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    }
  while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}
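
/* Usage note: the size limit checked above is the sccvn-max-scc-size
   --param, so a build that hits the bail-out can raise it explicitly,
   e.g.

     gcc -O2 --param sccvn-max-scc-size=20000 big-function.c

   trading compile time for keeping value numbering enabled on the
   whole function.  */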

/* Depth-first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;
	    }

continue_walking:
	  VN_INFO (name)->low = MIN (VN_INFO (name)->low,
				     VN_INFO (use)->low);

	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
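
/* The iterative walk above, with its explicit namevec/itervec stacks,
   exists to avoid overflowing the call stack on deep SSA graphs.  A
   minimal self-contained recursive version of the same Tarjan-style
   algorithm, over a hypothetical adjacency-list graph rather than this
   pass's own data structures (a sketch, not GCC code):

     #include <algorithm>
     #include <vector>

     struct node
     {
       int dfsnum, low;
       bool visited, on_stack;
       std::vector<int> succs;
     };

     static std::vector<node> g;
     static std::vector<int> stk;
     static int next_dfs;

     static void dfs (int n)
     {
       g[n].dfsnum = g[n].low = next_dfs++;
       g[n].visited = g[n].on_stack = true;
       stk.push_back (n);
       for (int u : g[n].succs)
	 {
	   if (!g[u].visited)
	     {
	       dfs (u);
	       g[n].low = std::min (g[n].low, g[u].low);
	     }
	   else if (g[u].on_stack)
	     g[n].low = std::min (g[n].low, g[u].dfsnum);
	 }
       if (g[n].low == g[n].dfsnum)   // n is the root of an SCC.
	 {
	   int x;
	   do
	     {
	       x = stk.back ();
	       stk.pop_back ();
	       g[x].on_stack = false;
	       // x belongs to the SCC rooted at n; process it here.
	     }
	   while (x != n);
	 }
     }
*/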

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp holds the RPO ordering itself: rpo_numbers_temp[i]
     is the index of the i'th block in RPO order.  We want the inverse
     map from block index to RPO number, so rearrange the array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
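
  /* For instance, if the function's blocks in RPO order are
     { 2, 3, 5 }, then rpo_numbers_temp = { 2, 3, 5 } and the loop
     above sets rpo_numbers[2] = 0, rpo_numbers[3] = 1 and
     rpo_numbers[5] = 2.  */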

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT: an SSA name uses its own value id, an
   invariant gets the shared constant value id, and anything else gets
   a fresh value id.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};

void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
	&& !DFS (op))
      {
	fail = true;
	return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
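
/* As a concrete example of the pruning above: for a block ending in

     if (i_1 != i_1)
       goto <bb 3>;
     else
       goto <bb 4>;

   with integral i_1, the folded condition is false, find_taken_edge
   returns the edge to bb 4, and the edge to bb 3 loses EDGE_EXECUTABLE.
   If bb 3 then has no executable predecessor edge left, the walk marks
   all of its outgoing edges not executable as well, so value numbering
   never has to derive facts from that unreachable code.  */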

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	{
	  VN_INFO (def)->visited = true;
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last stmt's
     SSA uses and deciding whether outgoing edges are executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }
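
  /* After this propagation a name whose value is another SSA name has
     inherited its leader's value id: e.g. if a_2 was value-numbered to
     b_1, a_2 now carries b_1's value id, so all members of one value
     equivalence class share a single id.  */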

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
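
/* Examples of the checks above: with -ftrapv a signed addition has
   TYPE_OVERFLOW_TRAPS set and may trap; a floating-point comparison
   under -ftrapping-math may trap on NaN operands; and an integer
   division whose second operand is not known to be nonzero may trap
   regardless of flags.  Consumers of the VN tables, PRE among them,
   use this predicate to avoid inserting such expressions on paths
   where they did not originally execute.  */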