/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s *value_type;
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s *value_type;
  typedef vn_phi_s *compare_type;
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s *value_type;
  typedef vn_reference_s *compare_type;
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  pool_allocator<vn_phi_s> *phis_pool;
  pool_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s *value_type;
  typedef vn_constant_s *compare_type;
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            vn_valueize (TREE_OPERAND
                                           (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)),
                          vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
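
/* For illustration (not part of the original sources): typical mappings are

     x_1 = y_2 + z_3;       -> VN_NARY       (binary RHS)
     x_1 = a.f;             -> VN_REFERENCE  (memory reference)
     x_1 = PHI <y_2, z_3>   -> VN_PHI
     x_1 = 42;              -> VN_CONSTANT

   which selects the hashtable the statement's expression is entered in.  */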
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            hstate.add_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
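
/* For illustration (not part of the original sources): a direct access to
   decl 'a' is canonicalized to MEM[&a] when its operands are copied, and a
   pointer access MEM[p_1] valueizes to MEM[&a] when p_1 == &a; in both
   cases the loop above accumulates the constant offsets and hashes just
   the underlying decl, so the two references receive the same hash and
   can share one value number.  */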
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
            temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    offset_int off
                      = (wi::to_offset (this_offset)
                         + wi::lrshift (wi::to_offset (bit_offset),
                                        LOG2_BITS_PER_UNIT));
                    if (wi::fits_shwi_p (off)
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                            trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || off != 0
                            || cfun->after_inlining))
                      temp.off = off.to_shwi ();
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              offset_int off = ((wi::to_offset (temp.op0)
                                 - wi::to_offset (temp.op1))
                                * wi::to_offset (temp.op2));
              if (wi::fits_shwi_p (off))
                temp.off = off.to_shwi ();
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          break;
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
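
/* For illustration (not part of the original sources): the access

     a.f[i_1]

   is decomposed outermost-first into roughly

     ARRAY_REF (op0 = i_1), COMPONENT_REF (op0 = f),
     MEM_REF (off = 0), ADDR_EXPR (&a)

   where the trailing two operands are the MEM[&decl] canonicalization
   described above, so the same access written through a pointer that
   valueizes to &a produces identical operands.  */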
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
        size = -1;
      else
        size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_to_shwi (op->op1);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
              max_size = -1;
            else
              {
                offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!tree_fits_shwi_p (op->op0)
              || !tree_fits_shwi_p (op->op1)
              || !tree_fits_shwi_p (op->op2))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
              hindex -= tree_to_shwi (op->op1);
              hindex *= tree_to_shwi (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
        mem_op->off = tree_to_shwi (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
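
/* For illustration (not part of the original sources): if the ops contain

     MEM_REF (off = 4), ADDR_EXPR (&s.a.b)

   and s.a.b lies at byte offset 8 within 's', the pair is rewritten to

     MEM_REF (off = 12), ADDR_EXPR (&s)

   i.e. the component offset behind the address is folded into the
   constant MEM_REF offset.  */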
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
          && *i_p == ops->length () - 1
          && off == 0
          /* This makes us disable this transform for PRE where the
             reference ops might be also used for code insertion which
             is invalid.  */
          && default_vn_walk_kind == VN_WALKREWRITE)
        {
          auto_vec<vn_reference_op_s, 32> tem;
          copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
          ops->pop ();
          ops->pop ();
          ops->safe_splice (tem);
          --*i_p;
          return;
        }
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
           && is_gimple_reg_type (ref->type)
           && (!INTEGRAL_TYPE_P (ref->type)
               || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
        size = TYPE_PRECISION (ref->type);
      else
        size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
          || size > MAX_BITSIZE_MODE_ANY_MODE)
        return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
        {
          if (operands[i].off == -1)
            return NULL_TREE;
          off += operands[i].off;
          if (operands[i].opcode == MEM_REF)
            {
              ++i;
              break;
            }
        }
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
        ctor = base->op0;
      else if (base->opcode == MEM_REF
               && base[1].opcode == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
                   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
        {
          decl = TREE_OPERAND (base[1].op0, 0);
          ctor = ctor_for_folding (decl);
        }
      if (ctor == NULL_TREE)
        return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
        {
          if (decl)
            {
              tree res = fold_ctor_reference (ref->type, ctor,
                                              off * BITS_PER_UNIT,
                                              size * BITS_PER_UNIT, decl);
              if (res)
                {
                  STRIP_USELESS_TYPE_CONVERSION (res);
                  if (is_gimple_min_invariant (res))
                    return res;
                }
            }
          else
            {
              unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
              if (native_encode_expr (ctor, buf, size, off) > 0)
                return native_interpret_expr (ref->type, buf, size);
            }
        }
    }

  return NULL_TREE;
}
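
/* For illustration (not part of the original sources): for

     static const int tab[2] = { 11, 22 };
     ... = tab[1];

   all reference ops have known constant offsets summing to 4, the base
   decl's initializer is obtained via ctor_for_folding, and the load folds
   to the constant 22 at lookup time without touching memory.  */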
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          offset_int off = ((wi::to_offset (vro->op0)
                             - wi::to_offset (vro->op1))
                            * wi::to_offset (vro->op2));
          if (wi::fits_shwi_p (off))
            vro->off = off.to_shwi ();
        }
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
                       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
                       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
           && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
         alias oracle again.  Valueization may improve points-to
         info of pointers and constify size and position arguments.
         Originally this was motivated by PR61034 which has
         conditional calls to free falsely clobbering ref because
         of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
        {
          oldargs[i] = gimple_call_arg (def_stmt, i);
          if (TREE_CODE (oldargs[i]) == SSA_NAME
              && VN_INFO (oldargs[i])->valnum != oldargs[i])
            {
              gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
              valueized_anything = true;
            }
        }
      if (valueized_anything)
        {
          bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
                                               ref);
          for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
            gimple_call_set_arg (def_stmt, i, oldargs[i]);
          if (!res)
            return NULL;
        }
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }
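
  /* For illustration (not part of the original sources): after

       memset (&a, 0, sizeof (a));
       x_1 = a.f;

     the zeroing store covers the load completely, so the lookup of a.f
     yields build_zero_cst of its type instead of walking further through
     the virtual use-def chain.  */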
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }
1876 /* 5) For aggregate copies translate the reference through them if
1877 the copy kills ref. */
1878 else if (vn_walk_kind == VN_WALKREWRITE
1879 && gimple_assign_single_p (def_stmt)
1880 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1881 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1882 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1884 tree base2;
1885 HOST_WIDE_INT offset2, size2, maxsize2;
1886 int i, j;
1887 auto_vec<vn_reference_op_s> rhs;
1888 vn_reference_op_t vro;
1889 ao_ref r;
1891 if (!lhs_ref_ok)
1892 return (void *)-1;
1894 /* See if the assignment kills REF. */
1895 base2 = ao_ref_base (&lhs_ref);
1896 offset2 = lhs_ref.offset;
1897 size2 = lhs_ref.size;
1898 maxsize2 = lhs_ref.max_size;
1899 if (maxsize2 == -1
1900 || (base != base2
1901 && (TREE_CODE (base) != MEM_REF
1902 || TREE_CODE (base2) != MEM_REF
1903 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1904 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1905 TREE_OPERAND (base2, 1))))
1906 || offset2 > offset
1907 || offset2 + size2 < offset + maxsize)
1908 return (void *)-1;
1910 /* Find the common base of ref and the lhs. lhs_ops already
1911 contains valueized operands for the lhs. */
1912 i = vr->operands.length () - 1;
1913 j = lhs_ops.length () - 1;
1914 while (j >= 0 && i >= 0
1915 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1917 i--;
1918 j--;
1921 /* ??? The innermost op should always be a MEM_REF and we already
1922 checked that the assignment to the lhs kills vr. Thus for
1923 aggregate copies using char[] types the vn_reference_op_eq
1924 may fail when comparing types for compatibility. But we really
1925 don't care here - further lookups with the rewritten operands
1926 will simply fail if we messed up types too badly. */
1927 HOST_WIDE_INT extra_off = 0;
1928 if (j == 0 && i >= 0
1929 && lhs_ops[0].opcode == MEM_REF
1930 && lhs_ops[0].off != -1)
1932 if (lhs_ops[0].off == vr->operands[i].off)
1933 i--, j--;
1934 else if (vr->operands[i].opcode == MEM_REF
1935 && vr->operands[i].off != -1)
1937 extra_off = vr->operands[i].off - lhs_ops[0].off;
1938 i--, j--;
1942 /* i now points to the first additional op.
1943 ??? LHS may not be completely contained in VR, one or more
1944 VIEW_CONVERT_EXPRs could be in its way. We could at least
1945 try handling outermost VIEW_CONVERT_EXPRs. */
1946 if (j != -1)
1947 return (void *)-1;
1949 /* Now re-write REF to be based on the rhs of the assignment. */
1950 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1952 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1953 if (extra_off != 0)
1955 if (rhs.length () < 2
1956 || rhs[0].opcode != MEM_REF
1957 || rhs[0].off == -1)
1958 return (void *)-1;
1959 rhs[0].off += extra_off;
1960 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1961 build_int_cst (TREE_TYPE (rhs[0].op0),
1962 extra_off));
1965 /* We need to pre-pend vr->operands[0..i] to rhs. */
1966 vec<vn_reference_op_s> old = vr->operands;
1967 if (i + 1 + rhs.length () > vr->operands.length ())
1969 vr->operands.safe_grow (i + 1 + rhs.length ());
1970 if (old == shared_lookup_references)
1971 shared_lookup_references = vr->operands;
1973 else
1974 vr->operands.truncate (i + 1 + rhs.length ());
1975 FOR_EACH_VEC_ELT (rhs, j, vro)
1976 vr->operands[i + 1 + j] = *vro;
1977 vr->operands = valueize_refs (vr->operands);
1978 if (old == shared_lookup_references)
1979 shared_lookup_references = vr->operands;
1980 vr->hashcode = vn_reference_compute_hash (vr);
1982 /* Try folding the new reference to a constant. */
1983 tree val = fully_constant_vn_reference_p (vr);
1984 if (val)
1985 return vn_reference_lookup_or_insert_for_pieces
1986 (vuse, vr->set, vr->type, vr->operands, val);
1988 /* Adjust *ref from the new operands. */
1989 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1990 return (void *)-1;
1991 /* This can happen with bitfields. */
1992 if (ref->size != r.size)
1993 return (void *)-1;
1994 *ref = r;
1996 /* Do not update last seen VUSE after translating. */
1997 last_vuse_ptr = NULL;
1999 /* Keep looking for the adjusted *REF / VR pair. */
2000 return NULL;
2001 }
2003 /* 6) For memcpy copies translate the reference through them if
2004 the copy kills ref. */
2005 else if (vn_walk_kind == VN_WALKREWRITE
2006 && is_gimple_reg_type (vr->type)
2007 /* ??? Handle BCOPY as well. */
2008 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2009 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2010 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2011 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2012 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2013 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2014 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2015 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2016 {
2017   tree lhs, rhs;
2018 ao_ref r;
2019 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2020 vn_reference_op_s op;
2021 HOST_WIDE_INT at;
2024 /* Only handle non-variable, addressable refs. */
2025 if (ref->size != maxsize
2026 || offset % BITS_PER_UNIT != 0
2027 || ref->size % BITS_PER_UNIT != 0)
2028 return (void *)-1;
2030 /* Extract a pointer base and an offset for the destination. */
2031 lhs = gimple_call_arg (def_stmt, 0);
2032 lhs_offset = 0;
2033 if (TREE_CODE (lhs) == SSA_NAME)
2034   {
2035     lhs = SSA_VAL (lhs);
2036     if (TREE_CODE (lhs) == SSA_NAME)
2037       {
2038         gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2039         if (gimple_assign_single_p (def_stmt)
2040             && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2041           lhs = gimple_assign_rhs1 (def_stmt);
2042       }
2043   }
2044 if (TREE_CODE (lhs) == ADDR_EXPR)
2045   {
2046     tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2047                                               &lhs_offset);
2048     if (!tem)
2049       return (void *)-1;
2050     if (TREE_CODE (tem) == MEM_REF
2051         && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2052       {
2053         lhs = TREE_OPERAND (tem, 0);
2054         if (TREE_CODE (lhs) == SSA_NAME)
2055           lhs = SSA_VAL (lhs);
2056         lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2057       }
2058     else if (DECL_P (tem))
2059       lhs = build_fold_addr_expr (tem);
2060     else
2061       return (void *)-1;
2062   }
2063 if (TREE_CODE (lhs) != SSA_NAME
2064 && TREE_CODE (lhs) != ADDR_EXPR)
2065 return (void *)-1;
2067 /* Extract a pointer base and an offset for the source. */
2068 rhs = gimple_call_arg (def_stmt, 1);
2069 rhs_offset = 0;
2070 if (TREE_CODE (rhs) == SSA_NAME)
2071 rhs = SSA_VAL (rhs);
2072 if (TREE_CODE (rhs) == ADDR_EXPR)
2073   {
2074     tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2075                                               &rhs_offset);
2076     if (!tem)
2077       return (void *)-1;
2078     if (TREE_CODE (tem) == MEM_REF
2079         && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2080       {
2081         rhs = TREE_OPERAND (tem, 0);
2082         rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2083       }
2084     else if (DECL_P (tem))
2085       rhs = build_fold_addr_expr (tem);
2086     else
2087       return (void *)-1;
2088   }
2089 if (TREE_CODE (rhs) != SSA_NAME
2090 && TREE_CODE (rhs) != ADDR_EXPR)
2091 return (void *)-1;
2093 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2095 /* The bases of the destination and the references have to agree. */
2096 if ((TREE_CODE (base) != MEM_REF
2097 && !DECL_P (base))
2098 || (TREE_CODE (base) == MEM_REF
2099 && (TREE_OPERAND (base, 0) != lhs
2100 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2101 || (DECL_P (base)
2102 && (TREE_CODE (lhs) != ADDR_EXPR
2103 || TREE_OPERAND (lhs, 0) != base)))
2104 return (void *)-1;
2106 at = offset / BITS_PER_UNIT;
2107 if (TREE_CODE (base) == MEM_REF)
2108 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2109 /* If the access is completely outside of the memcpy destination
2110 area there is no aliasing. */
2111 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2112 || lhs_offset + copy_size <= at)
2113 return NULL;
2114 /* And the access has to be contained within the memcpy destination. */
2115 if (lhs_offset > at
2116 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2117 return (void *)-1;
2119 /* Make room for 2 operands in the new reference. */
2120 if (vr->operands.length () < 2)
2121   {
2122     vec<vn_reference_op_s> old = vr->operands;
2123     vr->operands.safe_grow_cleared (2);
2124     if (old == shared_lookup_references
2125         && vr->operands != old)
2126       shared_lookup_references = vr->operands;
2127   }
2128 else
2129   vr->operands.truncate (2);
2131 /* The looked-through reference is a simple MEM_REF. */
2132 memset (&op, 0, sizeof (op));
2133 op.type = vr->type;
2134 op.opcode = MEM_REF;
2135 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2136 op.off = at - lhs_offset + rhs_offset;
2137 vr->operands[0] = op;
2138 op.type = TREE_TYPE (rhs);
2139 op.opcode = TREE_CODE (rhs);
2140 op.op0 = rhs;
2141 op.off = -1;
2142 vr->operands[1] = op;
2143 vr->hashcode = vn_reference_compute_hash (vr);
2145 /* Adjust *ref from the new operands. */
2146 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2147 return (void *)-1;
2148 /* This can happen with bitfields. */
2149 if (ref->size != r.size)
2150 return (void *)-1;
2151 *ref = r;
2153 /* Do not update last seen VUSE after translating. */
2154 last_vuse_ptr = NULL;
2156 /* Keep looking for the adjusted *REF / VR pair. */
2157 return NULL;
2158 }
2160 /* Bail out and stop walking. */
2161 return (void *)-1;
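/* A note on the protocol of this callback: walk_non_aliased_vuses
   invokes vn_reference_lookup_3 whenever the walk reaches a statement
   that possibly clobbers *REF.  Returning NULL continues the walk,
   possibly after *REF / VR have been rewritten in terms of the source
   of an aggregate copy or memcpy as in the cases above; returning
   (void *)-1 aborts the walk; returning a vn_reference_t ends it with
   a successful lookup.  For example, after
     memcpy (&a, &b, sizeof (a));
   a lookup of a.f is rewritten into an equivalent load from b and
   NULL is returned, so the walk continues from the memcpy's VUSE.  */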
2164 /* Lookup a reference operation by its parts, in the current hash table.
2165 Returns the resulting value number if it exists in the hash table,
2166 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2167 vn_reference_t stored in the hashtable if something is found. */
2169 tree
2170 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2171 vec<vn_reference_op_s> operands,
2172 vn_reference_t *vnresult, vn_lookup_kind kind)
2174 struct vn_reference_s vr1;
2175 vn_reference_t tmp;
2176 tree cst;
2178 if (!vnresult)
2179 vnresult = &tmp;
2180 *vnresult = NULL;
2182 vr1.vuse = vuse_ssa_val (vuse);
2183 shared_lookup_references.truncate (0);
2184 shared_lookup_references.safe_grow (operands.length ());
2185 memcpy (shared_lookup_references.address (),
2186 operands.address (),
2187 sizeof (vn_reference_op_s)
2188 * operands.length ());
2189 vr1.operands = operands = shared_lookup_references
2190 = valueize_refs (shared_lookup_references);
2191 vr1.type = type;
2192 vr1.set = set;
2193 vr1.hashcode = vn_reference_compute_hash (&vr1);
2194 if ((cst = fully_constant_vn_reference_p (&vr1)))
2195 return cst;
2197 vn_reference_lookup_1 (&vr1, vnresult);
2198 if (!*vnresult
2199 && kind != VN_NOWALK
2200 && vr1.vuse)
2201 {
2202   ao_ref r;
2203 vn_walk_kind = kind;
2204 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2205 *vnresult =
2206 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2207 vn_reference_lookup_2,
2208 vn_reference_lookup_3,
2209 vuse_ssa_val, &vr1);
2210   gcc_checking_assert (vr1.operands == shared_lookup_references);
2211 }
2213 if (*vnresult)
2214 return (*vnresult)->result;
2216 return NULL_TREE;
2219 /* Lookup OP in the current hash table, and return the resulting value
2220 number if it exists in the hash table. Return NULL_TREE if it does
2221 not exist in the hash table or if the result field of the structure
2222 was NULL. VNRESULT will be filled in with the vn_reference_t
2223 stored in the hashtable if one exists. */
2225 tree
2226 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2227 vn_reference_t *vnresult)
2229 vec<vn_reference_op_s> operands;
2230 struct vn_reference_s vr1;
2231 tree cst;
2232 bool valueized_anything;
2234 if (vnresult)
2235 *vnresult = NULL;
2237 vr1.vuse = vuse_ssa_val (vuse);
2238 vr1.operands = operands
2239 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2240 vr1.type = TREE_TYPE (op);
2241 vr1.set = get_alias_set (op);
2242 vr1.hashcode = vn_reference_compute_hash (&vr1);
2243 if ((cst = fully_constant_vn_reference_p (&vr1)))
2244 return cst;
2246 if (kind != VN_NOWALK
2247 && vr1.vuse)
2248 {
2249   vn_reference_t wvnresult;
2250 ao_ref r;
2251 /* Make sure to use a valueized reference if we valueized anything.
2252 Otherwise preserve the full reference for advanced TBAA. */
2253 if (!valueized_anything
2254 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2255 vr1.operands))
2256 ao_ref_init (&r, op);
2257 vn_walk_kind = kind;
2258 wvnresult =
2259 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2260 vn_reference_lookup_2,
2261 vn_reference_lookup_3,
2262 vuse_ssa_val, &vr1);
2263 gcc_checking_assert (vr1.operands == shared_lookup_references);
2264 if (wvnresult)
2265   {
2266     if (vnresult)
2267       *vnresult = wvnresult;
2268     return wvnresult->result;
2269   }
2271 return NULL_TREE;
2272 }
2274 return vn_reference_lookup_1 (&vr1, vnresult);
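/* For example, value numbering the load in
     x_1 = a.f;
   uses vn_reference_lookup (a.f, <vuse>, VN_WALKREWRITE, NULL).  If
   an equivalent reference was recorded before, possibly found by
   walking and translating through intervening stores, its result is
   returned; otherwise NULL_TREE is returned and the caller value
   numbers x_1 to itself and inserts the reference.  */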
2277 /* Lookup CALL in the current hash table and return the entry in
2278 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2280 void
2281 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2282 vn_reference_t vr)
2284 if (vnresult)
2285 *vnresult = NULL;
2287 tree vuse = gimple_vuse (call);
2289 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2290 vr->operands = valueize_shared_reference_ops_from_call (call);
2291 vr->type = gimple_expr_type (call);
2292 vr->set = 0;
2293 vr->hashcode = vn_reference_compute_hash (vr);
2294 vn_reference_lookup_1 (vr, vnresult);
2297 /* Insert OP into the current hash table with a value number of
2298 RESULT, and return the resulting reference structure we created. */
2300 static vn_reference_t
2301 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2303 vn_reference_s **slot;
2304 vn_reference_t vr1;
2305 bool tem;
2307 vr1 = current_info->references_pool->allocate ();
2308 if (TREE_CODE (result) == SSA_NAME)
2309 vr1->value_id = VN_INFO (result)->value_id;
2310 else
2311 vr1->value_id = get_or_alloc_constant_value_id (result);
2312 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2313 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2314 vr1->type = TREE_TYPE (op);
2315 vr1->set = get_alias_set (op);
2316 vr1->hashcode = vn_reference_compute_hash (vr1);
2317 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2318 vr1->result_vdef = vdef;
2320 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2321 INSERT);
2323 /* Because we lookup stores using vuses, and value number failures
2324 using the vdefs (see visit_reference_op_store for how and why),
2325 it's possible that on failure we may try to insert an already
2326 inserted store. This is not wrong, there is no ssa name for a
2327 store that we could use as a differentiator anyway. Thus, unlike
2328 the other lookup functions, you cannot gcc_assert (!*slot)
2329 here. */
2331 /* But free the old slot in case of a collision. */
2332 if (*slot)
2333 free_reference (*slot);
2335 *slot = vr1;
2336 return vr1;
2339 /* Insert a reference by its pieces into the current hash table with
2340 a value number of RESULT. Return the resulting reference
2341 structure we created. */
2343 vn_reference_t
2344 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2345 vec<vn_reference_op_s> operands,
2346 tree result, unsigned int value_id)
2349 vn_reference_s **slot;
2350 vn_reference_t vr1;
2352 vr1 = current_info->references_pool->allocate ();
2353 vr1->value_id = value_id;
2354 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2355 vr1->operands = valueize_refs (operands);
2356 vr1->type = type;
2357 vr1->set = set;
2358 vr1->hashcode = vn_reference_compute_hash (vr1);
2359 if (result && TREE_CODE (result) == SSA_NAME)
2360 result = SSA_VAL (result);
2361 vr1->result = result;
2363 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2364 INSERT);
2366 /* At this point we should have all the things inserted that we have
2367 seen before, and we should never try inserting something that
2368 already exists. */
2369 gcc_assert (!*slot);
2370 if (*slot)
2371 free_reference (*slot);
2373 *slot = vr1;
2374 return vr1;
2377 /* Compute and return the hash value for nary operation VBO1. */
2379 static hashval_t
2380 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2382 inchash::hash hstate;
2383 unsigned i;
2385 for (i = 0; i < vno1->length; ++i)
2386 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2387 vno1->op[i] = SSA_VAL (vno1->op[i]);
2389 if (vno1->length == 2
2390 && commutative_tree_code (vno1->opcode)
2391 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2392 std::swap (vno1->op[0], vno1->op[1]);
2394 hstate.add_int (vno1->opcode);
2395 for (i = 0; i < vno1->length; ++i)
2396 inchash::add_expr (vno1->op[i], hstate);
2398 return hstate.end ();
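/* Valueizing the operands and canonicalizing commutative operations
   before hashing makes syntactically different but equal expressions
   collide.  E.g. with b_2 having value number a_1,
     _3 = a_1 + c_4;
     _5 = c_4 + b_2;
   both hash as the canonically ordered pair (a_1, c_4), so the second
   lookup finds the entry recorded for the first.  */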
2401 /* Compare nary operations VNO1 and VNO2 and return true if they are
2402 equivalent. */
2404 bool
2405 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2407 unsigned i;
2409 if (vno1->hashcode != vno2->hashcode)
2410 return false;
2412 if (vno1->length != vno2->length)
2413 return false;
2415 if (vno1->opcode != vno2->opcode
2416 || !types_compatible_p (vno1->type, vno2->type))
2417 return false;
2419 for (i = 0; i < vno1->length; ++i)
2420 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2421 return false;
2423 return true;
2426 /* Initialize VNO from the pieces provided. */
2428 static void
2429 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2430 enum tree_code code, tree type, tree *ops)
2432 vno->opcode = code;
2433 vno->length = length;
2434 vno->type = type;
2435 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2438 /* Initialize VNO from OP. */
2440 static void
2441 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2443 unsigned i;
2445 vno->opcode = TREE_CODE (op);
2446 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2447 vno->type = TREE_TYPE (op);
2448 for (i = 0; i < vno->length; ++i)
2449 vno->op[i] = TREE_OPERAND (op, i);
2452 /* Return the number of operands for a vn_nary ops structure from STMT. */
2454 static unsigned int
2455 vn_nary_length_from_stmt (gimple stmt)
2457 switch (gimple_assign_rhs_code (stmt))
2459 case REALPART_EXPR:
2460 case IMAGPART_EXPR:
2461 case VIEW_CONVERT_EXPR:
2462 return 1;
2464 case BIT_FIELD_REF:
2465 return 3;
2467 case CONSTRUCTOR:
2468 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2470 default:
2471 return gimple_num_ops (stmt) - 1;
2475 /* Initialize VNO from STMT. */
2477 static void
2478 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2480 unsigned i;
2482 vno->opcode = gimple_assign_rhs_code (stmt);
2483 vno->type = gimple_expr_type (stmt);
2484 switch (vno->opcode)
2486 case REALPART_EXPR:
2487 case IMAGPART_EXPR:
2488 case VIEW_CONVERT_EXPR:
2489 vno->length = 1;
2490 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2491 break;
2493 case BIT_FIELD_REF:
2494 vno->length = 3;
2495 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2496 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2497 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2498 break;
2500 case CONSTRUCTOR:
2501 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2502 for (i = 0; i < vno->length; ++i)
2503 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2504 break;
2506 default:
2507 gcc_checking_assert (!gimple_assign_single_p (stmt));
2508 vno->length = gimple_num_ops (stmt) - 1;
2509 for (i = 0; i < vno->length; ++i)
2510 vno->op[i] = gimple_op (stmt, i + 1);
2514 /* Compute the hashcode for VNO and look for it in the hash table;
2515 return the resulting value number if it exists in the hash table.
2516 Return NULL_TREE if it does not exist in the hash table or if the
2517 result field of the operation is NULL. VNRESULT will contain the
2518 vn_nary_op_t from the hashtable if it exists. */
2520 static tree
2521 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2523 vn_nary_op_s **slot;
2525 if (vnresult)
2526 *vnresult = NULL;
2528 vno->hashcode = vn_nary_op_compute_hash (vno);
2529 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2530 NO_INSERT);
2531 if (!slot && current_info == optimistic_info)
2532 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2533 NO_INSERT);
2534 if (!slot)
2535 return NULL_TREE;
2536 if (vnresult)
2537 *vnresult = *slot;
2538 return (*slot)->result;
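/* While an SCC is being iterated, CURRENT_INFO is the optimistic
   table, which is emptied at the start of every iteration; facts
   proven in already converged SCCs live in VALID_INFO.  The fallback
   above therefore lets statements in the SCC reuse, e.g., an a_1 + 1
   expression value numbered long before the SCC was entered.  */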
2541 /* Lookup an n-ary operation by its pieces and return the resulting value
2542 number if it exists in the hash table. Return NULL_TREE if it does
2543 not exist in the hash table or if the result field of the operation
2544 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2545 if it exists. */
2547 tree
2548 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2549 tree type, tree *ops, vn_nary_op_t *vnresult)
2551 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2552 sizeof_vn_nary_op (length));
2553 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2554 return vn_nary_op_lookup_1 (vno1, vnresult);
2557 /* Lookup OP in the current hash table, and return the resulting value
2558 number if it exists in the hash table. Return NULL_TREE if it does
2559 not exist in the hash table or if the result field of the operation
2560 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2561 if it exists. */
2563 tree
2564 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2566 vn_nary_op_t vno1
2567 = XALLOCAVAR (struct vn_nary_op_s,
2568 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2569 init_vn_nary_op_from_op (vno1, op);
2570 return vn_nary_op_lookup_1 (vno1, vnresult);
2573 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2574 value number if it exists in the hash table. Return NULL_TREE if
2575 it does not exist in the hash table. VNRESULT will contain the
2576 vn_nary_op_t from the hashtable if it exists. */
2578 tree
2579 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2581 vn_nary_op_t vno1
2582 = XALLOCAVAR (struct vn_nary_op_s,
2583 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2584 init_vn_nary_op_from_stmt (vno1, stmt);
2585 return vn_nary_op_lookup_1 (vno1, vnresult);
2588 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2590 static vn_nary_op_t
2591 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2593 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2596 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2597 obstack. */
2599 static vn_nary_op_t
2600 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2602 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2603 &current_info->nary_obstack);
2605 vno1->value_id = value_id;
2606 vno1->length = length;
2607 vno1->result = result;
2609 return vno1;
2612 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2613 VNO->HASHCODE first. */
2615 static vn_nary_op_t
2616 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2617 bool compute_hash)
2619 vn_nary_op_s **slot;
2621 if (compute_hash)
2622 vno->hashcode = vn_nary_op_compute_hash (vno);
2624 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2625 gcc_assert (!*slot);
2627 *slot = vno;
2628 return vno;
2631 /* Insert an n-ary operation into the current hash table using its
2632 pieces. Return the vn_nary_op_t structure we created and put in
2633 the hashtable. */
2635 vn_nary_op_t
2636 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2637 tree type, tree *ops,
2638 tree result, unsigned int value_id)
2640 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2641 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2642 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2645 /* Insert OP into the current hash table with a value number of
2646 RESULT. Return the vn_nary_op_t structure we created and put in
2647 the hashtable. */
2649 vn_nary_op_t
2650 vn_nary_op_insert (tree op, tree result)
2652 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2653 vn_nary_op_t vno1;
2655 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2656 init_vn_nary_op_from_op (vno1, op);
2657 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2660 /* Insert the rhs of STMT into the current hash table with a value number of
2661 RESULT. */
2663 vn_nary_op_t
2664 vn_nary_op_insert_stmt (gimple stmt, tree result)
2666 vn_nary_op_t vno1
2667 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2668 result, VN_INFO (result)->value_id);
2669 init_vn_nary_op_from_stmt (vno1, stmt);
2670 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2673 /* Compute a hashcode for PHI operation VP1 and return it. */
2675 static inline hashval_t
2676 vn_phi_compute_hash (vn_phi_t vp1)
2678 inchash::hash hstate (vp1->block->index);
2679 int i;
2680 tree phi1op;
2681 tree type;
2683 /* If all PHI arguments are constants we need to distinguish
2684 the PHI node via its type. */
2685 type = vp1->type;
2686 hstate.merge_hash (vn_hash_type (type));
2688 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2690 if (phi1op == VN_TOP)
2691 continue;
2692 inchash::add_expr (phi1op, hstate);
2695 return hstate.end ();
2698 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2700 static int
2701 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2703 if (vp1->hashcode != vp2->hashcode)
2704 return false;
2706 if (vp1->block == vp2->block)
2708 int i;
2709 tree phi1op;
2711 /* If the PHI nodes do not have compatible types
2712 they are not the same. */
2713 if (!types_compatible_p (vp1->type, vp2->type))
2714 return false;
2716 /* Any phi in the same block will have its arguments in the
2717 same edge order, because of how we store phi nodes. */
2718 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2720 tree phi2op = vp2->phiargs[i];
2721 if (phi1op == VN_TOP || phi2op == VN_TOP)
2722 continue;
2723 if (!expressions_equal_p (phi1op, phi2op))
2724 return false;
2726 return true;
2728 return false;
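/* Treating VN_TOP as equal to anything is the optimistic assumption
   for arguments that have not been visited yet.  E.g. the same-block
   PHIs
     x_1 = PHI <a_2(3), VN_TOP(4)>
     y_5 = PHI <a_2(3), a_2(4)>
   compare equal here, which is what allows value numbers in SSA
   cycles to converge instead of immediately becoming varying.  */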
2731 static vec<tree> shared_lookup_phiargs;
2733 /* Lookup PHI in the current hash table, and return the resulting
2734 value number if it exists in the hash table. Return NULL_TREE if
2735 it does not exist in the hash table. */
2737 static tree
2738 vn_phi_lookup (gimple phi)
2740 vn_phi_s **slot;
2741 struct vn_phi_s vp1;
2742 unsigned i;
2744 shared_lookup_phiargs.truncate (0);
2746 /* Canonicalize the SSA_NAME's to their value number. */
2747 for (i = 0; i < gimple_phi_num_args (phi); i++)
2749 tree def = PHI_ARG_DEF (phi, i);
2750 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2751 shared_lookup_phiargs.safe_push (def);
2753 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2754 vp1.phiargs = shared_lookup_phiargs;
2755 vp1.block = gimple_bb (phi);
2756 vp1.hashcode = vn_phi_compute_hash (&vp1);
2757 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2758 NO_INSERT);
2759 if (!slot && current_info == optimistic_info)
2760 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2761 NO_INSERT);
2762 if (!slot)
2763 return NULL_TREE;
2764 return (*slot)->result;
2767 /* Insert PHI into the current hash table with a value number of
2768 RESULT. */
2770 static vn_phi_t
2771 vn_phi_insert (gimple phi, tree result)
2773 vn_phi_s **slot;
2774 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2775 unsigned i;
2776 vec<tree> args = vNULL;
2778 /* Canonicalize the SSA_NAME's to their value number. */
2779 for (i = 0; i < gimple_phi_num_args (phi); i++)
2781 tree def = PHI_ARG_DEF (phi, i);
2782 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2783 args.safe_push (def);
2785 vp1->value_id = VN_INFO (result)->value_id;
2786 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2787 vp1->phiargs = args;
2788 vp1->block = gimple_bb (phi);
2789 vp1->result = result;
2790 vp1->hashcode = vn_phi_compute_hash (vp1);
2792 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2794 /* Because we iterate over phi operations more than once, it's
2795 possible the slot might already exist here, hence no assert.  */
2796 *slot = vp1;
2797 return vp1;
2801 /* Print set of components in strongly connected component SCC to OUT. */
2803 static void
2804 print_scc (FILE *out, vec<tree> scc)
2806 tree var;
2807 unsigned int i;
2809 fprintf (out, "SCC consists of:");
2810 FOR_EACH_VEC_ELT (scc, i, var)
2812 fprintf (out, " ");
2813 print_generic_expr (out, var, 0);
2815 fprintf (out, "\n");
2818 /* Set the value number of FROM to TO, return true if it has changed
2819 as a result. */
2821 static inline bool
2822 set_ssa_val_to (tree from, tree to)
2824 tree currval = SSA_VAL (from);
2825 HOST_WIDE_INT toff, coff;
2827 /* The only thing we allow as value numbers are ssa_names
2828 and invariants. So assert that here. We don't allow VN_TOP
2829 as visiting a stmt should produce a value-number other than
2830 that.
2831 ??? Still VN_TOP can happen for unreachable code, so force
2832 it to varying in that case. Not all code is prepared to
2833 get VN_TOP on valueization. */
2834 if (to == VN_TOP)
2835   {
2836     if (dump_file && (dump_flags & TDF_DETAILS))
2837       fprintf (dump_file, "Forcing value number to varying on "
2838                "receiving VN_TOP\n");
2839     to = from;
2840   }
2842 gcc_assert (to != NULL_TREE
2843 && ((TREE_CODE (to) == SSA_NAME
2844 && (to == from || SSA_VAL (to) == to))
2845 || is_gimple_min_invariant (to)));
2847 if (from != to)
2848   {
2849     if (currval == from)
2850       {
2851         if (dump_file && (dump_flags & TDF_DETAILS))
2852           {
2853             fprintf (dump_file, "Not changing value number of ");
2854             print_generic_expr (dump_file, from, 0);
2855             fprintf (dump_file, " from VARYING to ");
2856             print_generic_expr (dump_file, to, 0);
2857             fprintf (dump_file, "\n");
2858           }
2859         return false;
2860       }
2861     else if (TREE_CODE (to) == SSA_NAME
2862              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2863       to = from;
2864   }
2866 if (dump_file && (dump_flags & TDF_DETAILS))
2867   {
2868     fprintf (dump_file, "Setting value number of ");
2869     print_generic_expr (dump_file, from, 0);
2870     fprintf (dump_file, " to ");
2871     print_generic_expr (dump_file, to, 0);
2872   }
2874 if (currval != to
2875 && !operand_equal_p (currval, to, 0)
2876 /* ??? For addresses involving volatile objects or types operand_equal_p
2877 does not reliably detect ADDR_EXPRs as equal. We know we are only
2878 getting invariant gimple addresses here, so can use
2879 get_addr_base_and_unit_offset to do this comparison. */
2880 && !(TREE_CODE (currval) == ADDR_EXPR
2881 && TREE_CODE (to) == ADDR_EXPR
2882 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2883 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2884 && coff == toff))
2885   {
2886     VN_INFO (from)->valnum = to;
2887     if (dump_file && (dump_flags & TDF_DETAILS))
2888       fprintf (dump_file, " (changed)\n");
2889     return true;
2890   }
2891 if (dump_file && (dump_flags & TDF_DETAILS))
2892 fprintf (dump_file, "\n");
2893 return false;
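/* Value numbers only move in one direction during iteration, e.g.
     SSA_VAL (i_1): VN_TOP -> 4 -> i_1
   that is, from unvisited to an optimistically assumed value to
   varying (an SSA name is varying when it is its own value number;
   the currval == from check above keeps varying names varying).  A
   true return value tells the SCC loop another iteration is needed.  */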
2896 /* Mark as processed all the definitions in the defining stmt of USE, or
2897 the USE itself. */
2899 static void
2900 mark_use_processed (tree use)
2902 ssa_op_iter iter;
2903 def_operand_p defp;
2904 gimple stmt = SSA_NAME_DEF_STMT (use);
2906 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2908 VN_INFO (use)->use_processed = true;
2909 return;
2912 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2914 tree def = DEF_FROM_PTR (defp);
2916 VN_INFO (def)->use_processed = true;
2920 /* Set all definitions in STMT to value number to themselves.
2921 Return true if a value number changed. */
2923 static bool
2924 defs_to_varying (gimple stmt)
2926 bool changed = false;
2927 ssa_op_iter iter;
2928 def_operand_p defp;
2930 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2932 tree def = DEF_FROM_PTR (defp);
2933 changed |= set_ssa_val_to (def, def);
2935 return changed;
2938 static bool expr_has_constants (tree expr);
2940 /* Visit a copy between LHS and RHS, return true if the value number
2941 changed. */
2943 static bool
2944 visit_copy (tree lhs, tree rhs)
2946 /* The copy may have a more interesting constant-filled expression
2947 (we don't, since we know our RHS is just an SSA name). */
2948 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2949 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2951 /* And finally valueize. */
2952 rhs = SSA_VAL (rhs);
2954 return set_ssa_val_to (lhs, rhs);
2957 /* Visit a nary operator RHS, value number it, and return true if the
2958 value number of LHS has changed as a result. */
2960 static bool
2961 visit_nary_op (tree lhs, gimple stmt)
2963 bool changed = false;
2964 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2966 if (result)
2967 changed = set_ssa_val_to (lhs, result);
2968 else
2970 changed = set_ssa_val_to (lhs, lhs);
2971 vn_nary_op_insert_stmt (stmt, lhs);
2974 return changed;
2977 /* Visit a call STMT storing into LHS. Return true if the value number
2978 of the LHS has changed as a result. */
2980 static bool
2981 visit_reference_op_call (tree lhs, gcall *stmt)
2983 bool changed = false;
2984 struct vn_reference_s vr1;
2985 vn_reference_t vnresult = NULL;
2986 tree vdef = gimple_vdef (stmt);
2988 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2989 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2990 lhs = NULL_TREE;
2992 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2993 if (vnresult)
2995 if (vnresult->result_vdef && vdef)
2996 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2998 if (!vnresult->result && lhs)
2999 vnresult->result = lhs;
3001 if (vnresult->result && lhs)
3003 changed |= set_ssa_val_to (lhs, vnresult->result);
3005 if (VN_INFO (vnresult->result)->has_constants)
3006 VN_INFO (lhs)->has_constants = true;
3009 else
3011 vn_reference_t vr2;
3012 vn_reference_s **slot;
3013 if (vdef)
3014 changed |= set_ssa_val_to (vdef, vdef);
3015 if (lhs)
3016 changed |= set_ssa_val_to (lhs, lhs);
3017 vr2 = current_info->references_pool->allocate ();
3018 vr2->vuse = vr1.vuse;
3019 /* As we are not walking the virtual operand chain we know the
3020 shared_lookup_references are still original so we can re-use
3021 them here. */
3022 vr2->operands = vr1.operands.copy ();
3023 vr2->type = vr1.type;
3024 vr2->set = vr1.set;
3025 vr2->hashcode = vr1.hashcode;
3026 vr2->result = lhs;
3027 vr2->result_vdef = vdef;
3028 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3029 INSERT);
3030 gcc_assert (!*slot);
3031 *slot = vr2;
3034 return changed;
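/* For example, given a const function f, the two calls
     x_1 = f (y_2);
     x_3 = f (y_2);
   hash identically (no vuse, same operands), so the lookup for the
   second call finds the entry inserted for the first and x_3 is
   value numbered to x_1.  */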
3037 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3038 and return true if the value number of the LHS has changed as a result. */
3040 static bool
3041 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3043 bool changed = false;
3044 tree last_vuse;
3045 tree result;
3047 last_vuse = gimple_vuse (stmt);
3048 last_vuse_ptr = &last_vuse;
3049 result = vn_reference_lookup (op, gimple_vuse (stmt),
3050 default_vn_walk_kind, NULL);
3051 last_vuse_ptr = NULL;
3053 /* We handle type-punning through unions by value-numbering based
3054 on offset and size of the access. Be prepared to handle a
3055 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3056 if (result
3057 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3059 /* We will be setting the value number of lhs to the value number
3060 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3061 So first simplify and lookup this expression to see if it
3062 is already available. */
3063 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3064 if ((CONVERT_EXPR_P (val)
3065 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3066 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3068 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3069 if ((CONVERT_EXPR_P (tem)
3070 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3071 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3072 TREE_TYPE (val), tem)))
3073 val = tem;
3075 result = val;
3076 if (!is_gimple_min_invariant (val)
3077 && TREE_CODE (val) != SSA_NAME)
3078 result = vn_nary_op_lookup (val, NULL);
3079 /* If the expression is not yet available, value-number lhs to
3080 a new SSA_NAME we create. */
3081 if (!result)
3083 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3084 "vntemp");
3085 /* Initialize value-number information properly. */
3086 VN_INFO_GET (result)->valnum = result;
3087 VN_INFO (result)->value_id = get_next_value_id ();
3088 VN_INFO (result)->expr = val;
3089 VN_INFO (result)->has_constants = expr_has_constants (val);
3090 VN_INFO (result)->needs_insertion = true;
3091 /* As all "inserted" statements are singleton SCCs, insert
3092 to the valid table. This is strictly needed to
3093 avoid re-generating new value SSA_NAMEs for the same
3094 expression during SCC iteration over and over (the
3095 optimistic table gets cleared after each iteration).
3096 We do not need to insert into the optimistic table, as
3097 lookups there will fall back to the valid table. */
3098 if (current_info == optimistic_info)
3100 current_info = valid_info;
3101 vn_nary_op_insert (val, result);
3102 current_info = optimistic_info;
3104 else
3105 vn_nary_op_insert (val, result);
3106 if (dump_file && (dump_flags & TDF_DETAILS))
3108 fprintf (dump_file, "Inserting name ");
3109 print_generic_expr (dump_file, result, 0);
3110 fprintf (dump_file, " for expression ");
3111 print_generic_expr (dump_file, val, 0);
3112 fprintf (dump_file, "\n");
3117 if (result)
3119 changed = set_ssa_val_to (lhs, result);
3120 if (TREE_CODE (result) == SSA_NAME
3121 && VN_INFO (result)->has_constants)
3123 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3124 VN_INFO (lhs)->has_constants = true;
3127 else
3129 changed = set_ssa_val_to (lhs, lhs);
3130 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3133 return changed;
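/* A concrete type-punning case handled above:
     union { int i; float f; } u;
     u.i = i_1;
     tem_2 = u.f;
   The lookup of u.f finds the store to u.i because offset and size
   match, and the type mismatch is bridged by valuing tem_2 as
   VIEW_CONVERT_EXPR <float> (i_1), reusing that expression if it is
   already available and inserting a new name for it otherwise.  */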
3137 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3138 and return true if the value number of the LHS has changed as a result. */
3140 static bool
3141 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3143 bool changed = false;
3144 vn_reference_t vnresult = NULL;
3145 tree result, assign;
3146 bool resultsame = false;
3147 tree vuse = gimple_vuse (stmt);
3148 tree vdef = gimple_vdef (stmt);
3150 if (TREE_CODE (op) == SSA_NAME)
3151 op = SSA_VAL (op);
3153 /* First we want to lookup using the *vuses* from the store and see
3154 if there the last store to this location with the same address
3155 had the same value.
3157 The vuses represent the memory state before the store. If the
3158 memory state, address, and value of the store are the same as the
3159 last store to this location, then this store will produce the
3160 same memory state as that store.
3162 In this case the vdef versions for this store are value numbered to those
3163 vuse versions, since they represent the same memory state after
3164 this store.
3166 Otherwise, the vdefs for the store are used when inserting into
3167 the table, since the store generates a new memory state. */
3169 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3171 if (result)
3173 if (TREE_CODE (result) == SSA_NAME)
3174 result = SSA_VAL (result);
3175 resultsame = expressions_equal_p (result, op);
3178 if ((!result || !resultsame)
3179 /* Only perform the following when being called from PRE
3180 which embeds tail merging. */
3181 && default_vn_walk_kind == VN_WALK)
3183 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3184 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3185 if (vnresult)
3187 VN_INFO (vdef)->use_processed = true;
3188 return set_ssa_val_to (vdef, vnresult->result_vdef);
3192 if (!result || !resultsame)
3194 if (dump_file && (dump_flags & TDF_DETAILS))
3196 fprintf (dump_file, "No store match\n");
3197 fprintf (dump_file, "Value numbering store ");
3198 print_generic_expr (dump_file, lhs, 0);
3199 fprintf (dump_file, " to ");
3200 print_generic_expr (dump_file, op, 0);
3201 fprintf (dump_file, "\n");
3203 /* Have to set value numbers before insert, since insert is
3204 going to valueize the references in-place. */
3205 if (vdef)
3207 changed |= set_ssa_val_to (vdef, vdef);
3210 /* Do not insert structure copies into the tables. */
3211 if (is_gimple_min_invariant (op)
3212 || is_gimple_reg (op))
3213 vn_reference_insert (lhs, op, vdef, NULL);
3215 /* Only perform the following when being called from PRE
3216 which embeds tail merging. */
3217 if (default_vn_walk_kind == VN_WALK)
3219 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3220 vn_reference_insert (assign, lhs, vuse, vdef);
3223 else
3225 /* We had a match, so value number the vdef to have the value
3226 number of the vuse it came from. */
3228 if (dump_file && (dump_flags & TDF_DETAILS))
3229 fprintf (dump_file, "Store matched earlier value, "
3230          "value numbering store vdefs to matching vuses.\n");
3232 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3235 return changed;
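/* For example, in
     a.x = x_1;   <- vdef .MEM_2
     ...          <- nothing clobbers a.x
     a.x = x_1;   <- vuse .MEM_2, vdef .MEM_3
   the lookup with vuse .MEM_2 finds the earlier store of the same
   value, so .MEM_3 is value numbered to .MEM_2: the second store
   changes nothing and can later be removed as redundant.  */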
3238 /* Visit and value number PHI, return true if the value number
3239 changed. */
3241 static bool
3242 visit_phi (gimple phi)
3244 bool changed = false;
3245 tree result;
3246 tree sameval = VN_TOP;
3247 bool allsame = true;
3249 /* TODO: We could check for this in init_sccvn, and replace this
3250 with a gcc_assert. */
3251 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3252 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3254 /* See if all non-TOP arguments have the same value. TOP is
3255 equivalent to everything, so we can ignore it. */
3256 edge_iterator ei;
3257 edge e;
3258 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3259   if (e->flags & EDGE_EXECUTABLE)
3260     {
3261       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3263       if (TREE_CODE (def) == SSA_NAME)
3264         def = SSA_VAL (def);
3265       if (def == VN_TOP)
3266         continue;
3267       if (sameval == VN_TOP)
3268         {
3269           sameval = def;
3270         }
3271       else
3272         {
3273           if (!expressions_equal_p (def, sameval))
3274             {
3275               allsame = false;
3276               break;
3277             }
3278         }
3279     }
3281 /* If all value numbered to the same value, the phi node has that
3282 value. */
3283 if (allsame)
3284 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3286 /* Otherwise, see if it is equivalent to a phi node in this block. */
3287 result = vn_phi_lookup (phi);
3288 if (result)
3289 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3290 else
3292 vn_phi_insert (phi, PHI_RESULT (phi));
3293 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3294 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3295 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3298 return changed;
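/* E.g.
     x_2 = PHI <a_1(3), a_1(4)>
   is value numbered to a_1 directly, while
     x_2 = PHI <a_1(3), b_4(4)>
   has no single value and is instead unified with any other PHI in
   this block whose arguments value number the same, edge by edge.  */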
3301 /* Return true if EXPR contains constants. */
3303 static bool
3304 expr_has_constants (tree expr)
3306 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3308 case tcc_unary:
3309 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3311 case tcc_binary:
3312 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3313 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3314 /* Constants inside reference ops are rarely interesting, but
3315 it can take a lot of looking to find them. */
3316 case tcc_reference:
3317 case tcc_declaration:
3318 return false;
3319 default:
3320 return is_gimple_min_invariant (expr);
3322 return false;
3325 /* Return true if STMT contains constants. */
3327 static bool
3328 stmt_has_constants (gimple stmt)
3330 tree tem;
3332 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3333 return false;
3335 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3337 case GIMPLE_TERNARY_RHS:
3338 tem = gimple_assign_rhs3 (stmt);
3339 if (TREE_CODE (tem) == SSA_NAME)
3340 tem = SSA_VAL (tem);
3341 if (is_gimple_min_invariant (tem))
3342 return true;
3343 /* Fallthru. */
3345 case GIMPLE_BINARY_RHS:
3346 tem = gimple_assign_rhs2 (stmt);
3347 if (TREE_CODE (tem) == SSA_NAME)
3348 tem = SSA_VAL (tem);
3349 if (is_gimple_min_invariant (tem))
3350 return true;
3351 /* Fallthru. */
3353 case GIMPLE_SINGLE_RHS:
3354 /* Constants inside reference ops are rarely interesting, but
3355 it can take a lot of looking to find them. */
3356 case GIMPLE_UNARY_RHS:
3357 tem = gimple_assign_rhs1 (stmt);
3358 if (TREE_CODE (tem) == SSA_NAME)
3359 tem = SSA_VAL (tem);
3360 return is_gimple_min_invariant (tem);
3362 default:
3363 gcc_unreachable ();
3365 return false;
3368 /* Simplify the binary expression RHS, and return the result if
3369 simplified. */
3371 static tree
3372 simplify_binary_expression (gimple stmt)
3374 tree result = NULL_TREE;
3375 tree op0 = gimple_assign_rhs1 (stmt);
3376 tree op1 = gimple_assign_rhs2 (stmt);
3377 enum tree_code code = gimple_assign_rhs_code (stmt);
3379 /* This will not catch every single case we could combine, but will
3380 catch those with constants. The goal here is to simultaneously
3381 combine constants between expressions, but avoid infinite
3382 expansion of expressions during simplification. */
3383 op0 = vn_valueize (op0);
3384 if (TREE_CODE (op0) == SSA_NAME
3385 && (VN_INFO (op0)->has_constants
3386 || TREE_CODE_CLASS (code) == tcc_comparison
3387 || code == COMPLEX_EXPR))
3388 op0 = vn_get_expr_for (op0);
3390 op1 = vn_valueize (op1);
3391 if (TREE_CODE (op1) == SSA_NAME
3392 && (VN_INFO (op1)->has_constants
3393 || code == COMPLEX_EXPR))
3394 op1 = vn_get_expr_for (op1);
3396 /* Pointer plus constant can be represented as invariant address.
3397 Do so to allow further propagation, see also tree forwprop. */
3398 if (code == POINTER_PLUS_EXPR
3399 && tree_fits_uhwi_p (op1)
3400 && TREE_CODE (op0) == ADDR_EXPR
3401 && is_gimple_min_invariant (op0))
3402 return build_invariant_address (TREE_TYPE (op0),
3403 TREE_OPERAND (op0, 0),
3404 tree_to_uhwi (op1));
3406 /* Avoid folding if nothing changed. */
3407 if (op0 == gimple_assign_rhs1 (stmt)
3408 && op1 == gimple_assign_rhs2 (stmt))
3409 return NULL_TREE;
3411 fold_defer_overflow_warnings ();
3413 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3414 if (result)
3415 STRIP_USELESS_TYPE_CONVERSION (result);
3417 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3418 stmt, 0);
3420 /* Make sure result is not a complex expression consisting
3421 of operators of operators (IE (a + b) + (a + c))
3422 Otherwise, we will end up with unbounded expressions if
3423 fold does anything at all. */
3424 if (result && valid_gimple_rhs_p (result))
3425 return result;
3427 return NULL_TREE;
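/* For example, if op0 valueizes to &a and op1 to 4, a
   POINTER_PLUS_EXPR becomes an invariant address of the form
   &MEM[&a + 4]; and x_1 + 1 where x_1's recorded expression is
   y_2 + 2 folds to y_2 + 3, with valid_gimple_rhs_p rejecting any
   result that is not a flat gimple rhs.  */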
3430 /* Simplify the unary expression RHS, and return the result if
3431 simplified. */
3433 static tree
3434 simplify_unary_expression (gassign *stmt)
3436 tree result = NULL_TREE;
3437 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3438 enum tree_code code = gimple_assign_rhs_code (stmt);
3440 /* We handle some tcc_reference codes here that are all
3441 GIMPLE_ASSIGN_SINGLE codes. */
3442 if (code == REALPART_EXPR
3443 || code == IMAGPART_EXPR
3444 || code == VIEW_CONVERT_EXPR
3445 || code == BIT_FIELD_REF)
3446 op0 = TREE_OPERAND (op0, 0);
3448 orig_op0 = op0;
3449 op0 = vn_valueize (op0);
3450 if (TREE_CODE (op0) == SSA_NAME)
3452 if (VN_INFO (op0)->has_constants)
3453 op0 = vn_get_expr_for (op0);
3454 else if (CONVERT_EXPR_CODE_P (code)
3455 || code == REALPART_EXPR
3456 || code == IMAGPART_EXPR
3457 || code == VIEW_CONVERT_EXPR
3458 || code == BIT_FIELD_REF)
3460 /* We want to do tree-combining on conversion-like expressions.
3461 Make sure we feed only SSA_NAMEs or constants to fold though. */
3462 tree tem = vn_get_expr_for (op0);
3463 if (UNARY_CLASS_P (tem)
3464 || BINARY_CLASS_P (tem)
3465 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3466 || TREE_CODE (tem) == SSA_NAME
3467 || TREE_CODE (tem) == CONSTRUCTOR
3468 || is_gimple_min_invariant (tem))
3469 op0 = tem;
3473 /* Avoid folding if nothing changed, but remember the expression. */
3474 if (op0 == orig_op0)
3475 return NULL_TREE;
3477 if (code == BIT_FIELD_REF)
3479 tree rhs = gimple_assign_rhs1 (stmt);
3480 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3481 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3483 else
3484 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3485 if (result)
3487 STRIP_USELESS_TYPE_CONVERSION (result);
3488 if (valid_gimple_rhs_p (result))
3489 return result;
3492 return NULL_TREE;
3495 /* Try to simplify RHS using equivalences and constant folding. */
3497 static tree
3498 try_to_simplify (gassign *stmt)
3500 enum tree_code code = gimple_assign_rhs_code (stmt);
3501 tree tem;
3503 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3504 in this case, there is no point in doing extra work. */
3505 if (code == SSA_NAME)
3506 return NULL_TREE;
3508 /* First try constant folding based on our current lattice. */
3509 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3510 if (tem
3511 && (TREE_CODE (tem) == SSA_NAME
3512 || is_gimple_min_invariant (tem)))
3513 return tem;
3515 /* If that didn't work try combining multiple statements. */
3516 switch (TREE_CODE_CLASS (code))
3518 case tcc_reference:
3519 /* Fallthrough for some unary codes that can operate on registers. */
3520 if (!(code == REALPART_EXPR
3521 || code == IMAGPART_EXPR
3522 || code == VIEW_CONVERT_EXPR
3523 || code == BIT_FIELD_REF))
3524 break;
3525 /* We could do a little more with unary ops, if they expand
3526 into binary ops, but it's debatable whether it is worth it. */
3527 case tcc_unary:
3528 return simplify_unary_expression (stmt);
3530 case tcc_comparison:
3531 case tcc_binary:
3532 return simplify_binary_expression (stmt);
3534 default:
3535 break;
3538 return NULL_TREE;
3541 /* Visit and value number USE, return true if the value number
3542 changed. */
3544 static bool
3545 visit_use (tree use)
3547 bool changed = false;
3548 gimple stmt = SSA_NAME_DEF_STMT (use);
3550 mark_use_processed (use);
3552 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3553 if (dump_file && (dump_flags & TDF_DETAILS)
3554 && !SSA_NAME_IS_DEFAULT_DEF (use))
3556 fprintf (dump_file, "Value numbering ");
3557 print_generic_expr (dump_file, use, 0);
3558 fprintf (dump_file, " stmt = ");
3559 print_gimple_stmt (dump_file, stmt, 0, 0);
3562 /* Handle uninitialized uses. */
3563 if (SSA_NAME_IS_DEFAULT_DEF (use))
3564 changed = set_ssa_val_to (use, use);
3565 else
3567 if (gimple_code (stmt) == GIMPLE_PHI)
3568 changed = visit_phi (stmt);
3569 else if (gimple_has_volatile_ops (stmt))
3570 changed = defs_to_varying (stmt);
3571 else if (is_gimple_assign (stmt))
3573 enum tree_code code = gimple_assign_rhs_code (stmt);
3574 tree lhs = gimple_assign_lhs (stmt);
3575 tree rhs1 = gimple_assign_rhs1 (stmt);
3576 tree simplified;
3578 /* Shortcut for copies. Simplifying copies is pointless,
3579 since we copy the expression and value they represent. */
3580 if (code == SSA_NAME
3581 && TREE_CODE (lhs) == SSA_NAME)
3583 changed = visit_copy (lhs, rhs1);
3584 goto done;
3586 simplified = try_to_simplify (as_a <gassign *> (stmt));
3587 if (simplified)
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "RHS ");
3592 print_gimple_expr (dump_file, stmt, 0, 0);
3593 fprintf (dump_file, " simplified to ");
3594 print_generic_expr (dump_file, simplified, 0);
3595 if (TREE_CODE (lhs) == SSA_NAME)
3596 fprintf (dump_file, " has constants %d\n",
3597 expr_has_constants (simplified));
3598 else
3599 fprintf (dump_file, "\n");
3602 /* Setting value numbers to constants will occasionally
3603 screw up phi congruence because constants are not
3604 uniquely associated with a single ssa name that can be
3605 looked up. */
3606 if (simplified
3607 && is_gimple_min_invariant (simplified)
3608 && TREE_CODE (lhs) == SSA_NAME)
3610 VN_INFO (lhs)->expr = simplified;
3611 VN_INFO (lhs)->has_constants = true;
3612 changed = set_ssa_val_to (lhs, simplified);
3613 goto done;
3615 else if (simplified
3616 && TREE_CODE (simplified) == SSA_NAME
3617 && TREE_CODE (lhs) == SSA_NAME)
3619 changed = visit_copy (lhs, simplified);
3620 goto done;
3622 else if (simplified)
3624 if (TREE_CODE (lhs) == SSA_NAME)
3626 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3627 /* We have to unshare the expression or else
3628 valueizing may change the IL stream. */
3629 VN_INFO (lhs)->expr = unshare_expr (simplified);
3632 else if (stmt_has_constants (stmt)
3633 && TREE_CODE (lhs) == SSA_NAME)
3634 VN_INFO (lhs)->has_constants = true;
3635 else if (TREE_CODE (lhs) == SSA_NAME)
3637 /* We reset expr and constantness here because we may
3638 have been value numbering optimistically, and
3639 iterating. They may become non-constant in this case,
3640 even if they were optimistically constant. */
3642 VN_INFO (lhs)->has_constants = false;
3643 VN_INFO (lhs)->expr = NULL_TREE;
3646 if ((TREE_CODE (lhs) == SSA_NAME
3647 /* We can substitute SSA_NAMEs that are live over
3648 abnormal edges with their constant value. */
3649 && !(gimple_assign_copy_p (stmt)
3650 && is_gimple_min_invariant (rhs1))
3651 && !(simplified
3652 && is_gimple_min_invariant (simplified))
3653 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3654 /* Stores or copies from SSA_NAMEs that are live over
3655 abnormal edges are a problem. */
3656 || (code == SSA_NAME
3657 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3658 changed = defs_to_varying (stmt);
3659 else if (REFERENCE_CLASS_P (lhs)
3660 || DECL_P (lhs))
3661 changed = visit_reference_op_store (lhs, rhs1, stmt);
3662 else if (TREE_CODE (lhs) == SSA_NAME)
3664 if ((gimple_assign_copy_p (stmt)
3665 && is_gimple_min_invariant (rhs1))
3666 || (simplified
3667 && is_gimple_min_invariant (simplified)))
3669 VN_INFO (lhs)->has_constants = true;
3670 if (simplified)
3671 changed = set_ssa_val_to (lhs, simplified);
3672 else
3673 changed = set_ssa_val_to (lhs, rhs1);
3675 else
3677 /* First try to lookup the simplified expression. */
3678 if (simplified)
3680 enum gimple_rhs_class rhs_class;
3683 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3684 if ((rhs_class == GIMPLE_UNARY_RHS
3685 || rhs_class == GIMPLE_BINARY_RHS
3686 || rhs_class == GIMPLE_TERNARY_RHS)
3687 && valid_gimple_rhs_p (simplified))
3689 tree result = vn_nary_op_lookup (simplified, NULL);
3690 if (result)
3692 changed = set_ssa_val_to (lhs, result);
3693 goto done;
3698 /* Otherwise visit the original statement. */
3699 switch (vn_get_stmt_kind (stmt))
3701 case VN_NARY:
3702 changed = visit_nary_op (lhs, stmt);
3703 break;
3704 case VN_REFERENCE:
3705 changed = visit_reference_op_load (lhs, rhs1, stmt);
3706 break;
3707 default:
3708 changed = defs_to_varying (stmt);
3709 break;
3713 else
3714 changed = defs_to_varying (stmt);
3716 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3718 tree lhs = gimple_call_lhs (stmt);
3719 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3721 /* Try constant folding based on our current lattice. */
3722 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3723 vn_valueize);
3724 if (simplified)
3726 if (dump_file && (dump_flags & TDF_DETAILS))
3728 fprintf (dump_file, "call ");
3729 print_gimple_expr (dump_file, stmt, 0, 0);
3730 fprintf (dump_file, " simplified to ");
3731 print_generic_expr (dump_file, simplified, 0);
3732 if (TREE_CODE (lhs) == SSA_NAME)
3733 fprintf (dump_file, " has constants %d\n",
3734 expr_has_constants (simplified));
3735 else
3736 fprintf (dump_file, "\n");
3739 /* Setting value numbers to constants will occasionally
3740 screw up phi congruence because constants are not
3741 uniquely associated with a single ssa name that can be
3742 looked up. */
3743 if (simplified
3744 && is_gimple_min_invariant (simplified))
3746 VN_INFO (lhs)->expr = simplified;
3747 VN_INFO (lhs)->has_constants = true;
3748 changed = set_ssa_val_to (lhs, simplified);
3749 if (gimple_vdef (stmt))
3750 changed |= set_ssa_val_to (gimple_vdef (stmt),
3751 SSA_VAL (gimple_vuse (stmt)));
3752 goto done;
3754 else if (simplified
3755 && TREE_CODE (simplified) == SSA_NAME)
3757 changed = visit_copy (lhs, simplified);
3758 if (gimple_vdef (stmt))
3759 changed |= set_ssa_val_to (gimple_vdef (stmt),
3760 SSA_VAL (gimple_vuse (stmt)));
3761 goto done;
3763 else
3765 if (stmt_has_constants (stmt))
3766 VN_INFO (lhs)->has_constants = true;
3767 else
3769 /* We reset expr and constantness here because we may
3770 have been value numbering optimistically, and
3771 iterating. They may become non-constant in this case,
3772 even if they were optimistically constant. */
3773 VN_INFO (lhs)->has_constants = false;
3774 VN_INFO (lhs)->expr = NULL_TREE;
3777 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3779 changed = defs_to_varying (stmt);
3780 goto done;
3785 if (!gimple_call_internal_p (stmt)
3786 && (/* Calls to the same function with the same vuse
3787 and the same operands do not necessarily return the same
3788 value, unless they're pure or const. */
3789 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3790 /* If calls have a vdef, subsequent calls won't have
3791 the same incoming vuse. So, if 2 calls with vdef have the
3792 same vuse, we know they're not subsequent.
3793 We can value number two calls to the same function with the
3794 same vuse and the same operands that are not subsequent
3795 to each other as the same, because there is no code in the
3796 program that can compare the two values...
3797 || (gimple_vdef (stmt)
3798 /* ... unless the call returns a pointer which does
3799 not alias with anything else. In which case the
3800 information that the values are distinct is encoded
3801 in the IL. */
3802 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3803 /* Only perform the following when being called from PRE
3804 which embeds tail merging. */
3805 && default_vn_walk_kind == VN_WALK)))
3806 changed = visit_reference_op_call (lhs, call_stmt);
3807 else
3808 changed = defs_to_varying (stmt);
3810 else
3811 changed = defs_to_varying (stmt);
3813 done:
3814 return changed;
3817 /* Compare two operands by reverse postorder index */
3819 static int
3820 compare_ops (const void *pa, const void *pb)
3822 const tree opa = *((const tree *)pa);
3823 const tree opb = *((const tree *)pb);
3824 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3825 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3826 basic_block bba;
3827 basic_block bbb;
3829 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3830 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3831 else if (gimple_nop_p (opstmta))
3832 return -1;
3833 else if (gimple_nop_p (opstmtb))
3834 return 1;
3836 bba = gimple_bb (opstmta);
3837 bbb = gimple_bb (opstmtb);
3839 if (!bba && !bbb)
3840 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3841 else if (!bba)
3842 return -1;
3843 else if (!bbb)
3844 return 1;
3846 if (bba == bbb)
3848 if (gimple_code (opstmta) == GIMPLE_PHI
3849 && gimple_code (opstmtb) == GIMPLE_PHI)
3850 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3851 else if (gimple_code (opstmta) == GIMPLE_PHI)
3852 return -1;
3853 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3854 return 1;
3855 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3856 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3857 else
3858 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3860 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3863 /* Sort an array containing members of a strongly connected component
3864 SCC so that the members are ordered by RPO number.
3865 This means that when the sort is complete, iterating through the
3866 array will give you the members in RPO order. */
3868 static void
3869 sort_scc (vec<tree> scc)
3871 scc.qsort (compare_ops);
3874 /* Insert the no longer used nary ONARY to the hash INFO. */
3876 static void
3877 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3879 size_t size = sizeof_vn_nary_op (onary->length);
3880 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3881 &info->nary_obstack);
3882 memcpy (nary, onary, size);
3883 vn_nary_op_insert_into (nary, info->nary, false);
3886 /* Insert the no longer used phi OPHI to the hash INFO. */
3888 static void
3889 copy_phi (vn_phi_t ophi, vn_tables_t info)
3891 vn_phi_t phi = info->phis_pool->allocate ();
3892 vn_phi_s **slot;
3893 memcpy (phi, ophi, sizeof (*phi));
3894 ophi->phiargs.create (0);
3895 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3896 gcc_assert (!*slot);
3897 *slot = phi;
3900 /* Insert the no longer used reference OREF to the hash INFO. */
3902 static void
3903 copy_reference (vn_reference_t oref, vn_tables_t info)
3905 vn_reference_t ref;
3906 vn_reference_s **slot;
3907 ref = info->references_pool->allocate ();
3908 memcpy (ref, oref, sizeof (*ref));
3909 oref->operands.create (0);
3910 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3911 if (*slot)
3912 free_reference (*slot);
3913 *slot = ref;
3916 /* Process a strongly connected component in the SSA graph. */
3918 static void
3919 process_scc (vec<tree> scc)
3921 tree var;
3922 unsigned int i;
3923 unsigned int iterations = 0;
3924 bool changed = true;
3925 vn_nary_op_iterator_type hin;
3926 vn_phi_iterator_type hip;
3927 vn_reference_iterator_type hir;
3928 vn_nary_op_t nary;
3929 vn_phi_t phi;
3930 vn_reference_t ref;
3932 /* If the SCC has a single member, just visit it. */
3933 if (scc.length () == 1)
3934 {
3935 tree use = scc[0];
3936 if (VN_INFO (use)->use_processed)
3937 return;
3938 /* We need to make sure it doesn't form a cycle itself, which can
3939 happen for self-referential PHI nodes. In that case we would
3940 end up inserting an expression with VN_TOP operands into the
3941 valid table which makes us derive bogus equivalences later.
3942 The cheapest way to check this is to assume it for all PHI nodes. */
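/* For instance (hypothetical names), in a loop

     x_1 = PHI <x_1 (latch), a_2 (preheader)>

   the name x_1 is a single-member SCC that depends on itself, so a
   single non-iterated visit could record an expression over VN_TOP. */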
3943 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3944 /* Fallthru to iteration. */ ;
3945 else
3946 {
3947 visit_use (use);
3948 return;
3949 }
3950 }
3952 if (dump_file && (dump_flags & TDF_DETAILS))
3953 print_scc (dump_file, scc);
3955 /* Iterate over the SCC with the optimistic table until it stops
3956 changing. */
3957 current_info = optimistic_info;
3958 while (changed)
3959 {
3960 changed = false;
3961 iterations++;
3962 if (dump_file && (dump_flags & TDF_DETAILS))
3963 fprintf (dump_file, "Starting iteration %d\n", iterations);
3964 /* As we are value-numbering optimistically we have to
3965 clear the expression tables and the simplified expressions
3966 in each iteration until we converge. */
3967 optimistic_info->nary->empty ();
3968 optimistic_info->phis->empty ();
3969 optimistic_info->references->empty ();
3970 obstack_free (&optimistic_info->nary_obstack, NULL);
3971 gcc_obstack_init (&optimistic_info->nary_obstack);
3972 optimistic_info->phis_pool->release ();
3973 optimistic_info->references_pool->release ();
3974 FOR_EACH_VEC_ELT (scc, i, var)
3975 VN_INFO (var)->expr = NULL_TREE;
3976 FOR_EACH_VEC_ELT (scc, i, var)
3977 changed |= visit_use (var);
3978 }
3980 if (dump_file && (dump_flags & TDF_DETAILS))
3981 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3982 statistics_histogram_event (cfun, "SCC iterations", iterations);
3984 /* Finally, copy the contents of the no longer used optimistic
3985 table to the valid table. */
3986 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3987 copy_nary (nary, valid_info);
3988 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3989 copy_phi (phi, valid_info);
3990 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3991 ref, vn_reference_t, hir)
3992 copy_reference (ref, valid_info);
3994 current_info = valid_info;
3995 }
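/* A small illustrative convergence example (hypothetical names):
   for the cycle

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0

   the first optimistic iteration records i_1 = 0 and then
   i_2 = 0 + 0 = 0; the second iteration reproduces the same value
   numbers, so CHANGED stays false and the optimistic results proving
   both names equal to 0 are copied into the valid table. */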
3998 /* Pop the components of the found SCC for NAME off the SCC stack
3999 and process them. Returns true if all went well, false if
4000 we ran into resource limits. */
4002 static bool
4003 extract_and_process_scc_for_name (tree name)
4004 {
4005 auto_vec<tree> scc;
4006 tree x;
4008 /* Found an SCC, pop the components off the SCC stack and
4009 process them. */
4010 do
4011 {
4012 x = sccstack.pop ();
4014 VN_INFO (x)->on_sccstack = false;
4015 scc.safe_push (x);
4016 } while (x != name);
4018 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4019 if (scc.length ()
4020 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4021 {
4022 if (dump_file)
4023 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4024 "SCC size %u exceeding %u\n", scc.length (),
4025 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4027 return false;
4028 }
4030 if (scc.length () > 1)
4031 sort_scc (scc);
4033 process_scc (scc);
4035 return true;
4036 }
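/* The limit checked above can be tuned on the command line; assuming
   the usual spelling of the parameter, something like

     gcc -O2 --param sccvn-max-scc-size=10000 ...

   bounds the size of any SCC that SCCVN is willing to iterate on
   before it gives up on the function. */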
4038 /* Depth first search on NAME to discover and process SCCs in the SSA
4039 graph.
4040 Execution of this algorithm relies on the fact that the SCCs are
4041 popped off the stack in topological order.
4042 Returns true if successful, false if we stopped processing SCCs due
4043 to resource constraints. */
4045 static bool
4046 DFS (tree name)
4047 {
4048 vec<ssa_op_iter> itervec = vNULL;
4049 vec<tree> namevec = vNULL;
4050 use_operand_p usep = NULL;
4051 gimple defstmt;
4052 tree use;
4053 ssa_op_iter iter;
4055 start_over:
4056 /* SCC info */
4057 VN_INFO (name)->dfsnum = next_dfs_num++;
4058 VN_INFO (name)->visited = true;
4059 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4061 sccstack.safe_push (name);
4062 VN_INFO (name)->on_sccstack = true;
4063 defstmt = SSA_NAME_DEF_STMT (name);
4065 /* Recursively DFS on our operands, looking for SCCs. */
4066 if (!gimple_nop_p (defstmt))
4067 {
4068 /* Push a new iterator. */
4069 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4070 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4071 else
4072 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4073 }
4074 else
4075 clear_and_done_ssa_iter (&iter);
4077 while (1)
4078 {
4079 /* If we are done processing uses of a name, go up the stack
4080 of iterators and process SCCs as we found them. */
4081 if (op_iter_done (&iter))
4082 {
4083 /* See if we found an SCC. */
4084 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4085 if (!extract_and_process_scc_for_name (name))
4086 {
4087 namevec.release ();
4088 itervec.release ();
4089 return false;
4090 }
4092 /* Check if we are done. */
4093 if (namevec.is_empty ())
4094 {
4095 namevec.release ();
4096 itervec.release ();
4097 return true;
4098 }
4100 /* Restore the last use walker and continue walking there. */
4101 use = name;
4102 name = namevec.pop ();
4103 memcpy (&iter, &itervec.last (),
4104 sizeof (ssa_op_iter));
4105 itervec.pop ();
4106 goto continue_walking;
4107 }
4109 use = USE_FROM_PTR (usep);
4111 /* Since we handle phi nodes, we will sometimes get
4112 invariants in the use expression. */
4113 if (TREE_CODE (use) == SSA_NAME)
4114 {
4115 if (! (VN_INFO (use)->visited))
4116 {
4117 /* Recurse by pushing the current use walking state on
4118 the stack and starting over. */
4119 itervec.safe_push (iter);
4120 namevec.safe_push (name);
4121 name = use;
4122 goto start_over;
4123 }
4124 continue_walking:
4125 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4126 VN_INFO (use)->low);
4128 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4129 && VN_INFO (use)->on_sccstack)
4130 {
4131 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4132 VN_INFO (name)->low);
4133 }
4134 }
4136 usep = op_iter_next_use (&iter);
4137 }
4138 }
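/* For reference, a minimal recursive sketch of the Tarjan scheme that
   DFS implements iteratively via the explicit namevec/itervec stacks;
   the graph accessors, stack helpers and MAX_NODES below are
   hypothetical, not GCC interfaces:

     static int next_num = 1;
     static int dfsnum[MAX_NODES], low[MAX_NODES];
     static bool visited[MAX_NODES], on_stack[MAX_NODES];

     static void
     tarjan_visit (int v)
     {
       dfsnum[v] = low[v] = next_num++;
       visited[v] = true;
       stack_push (v);
       on_stack[v] = true;
       for (int w = first_succ (v); w >= 0; w = next_succ (v, w))
         if (!visited[w])
           {
             tarjan_visit (w);
             low[v] = MIN (low[v], low[w]);
           }
         else if (dfsnum[w] < dfsnum[v] && on_stack[w])
           low[v] = MIN (low[v], dfsnum[w]);
       if (low[v] == dfsnum[v])
         pop_and_process_scc_down_to (v);
     }

   DFS additionally bails out, unwinding its explicit stacks, when
   processing an SCC fails because of the size limit. */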
4140 /* Allocate a value number table. */
4142 static void
4143 allocate_vn_table (vn_tables_t table)
4144 {
4145 table->phis = new vn_phi_table_type (23);
4146 table->nary = new vn_nary_op_table_type (23);
4147 table->references = new vn_reference_table_type (23);
4149 gcc_obstack_init (&table->nary_obstack);
4150 table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
4151 table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
4152 30);
4153 }
4155 /* Free a value number table. */
4157 static void
4158 free_vn_table (vn_tables_t table)
4159 {
4160 delete table->phis;
4161 table->phis = NULL;
4162 delete table->nary;
4163 table->nary = NULL;
4164 delete table->references;
4165 table->references = NULL;
4166 obstack_free (&table->nary_obstack, NULL);
4167 delete table->phis_pool;
4168 delete table->references_pool;
4169 }
4171 static void
4172 init_scc_vn (void)
4173 {
4174 size_t i;
4175 int j;
4176 int *rpo_numbers_temp;
4178 calculate_dominance_info (CDI_DOMINATORS);
4179 sccstack.create (0);
4180 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4182 constant_value_ids = BITMAP_ALLOC (NULL);
4184 next_dfs_num = 1;
4185 next_value_id = 1;
4187 vn_ssa_aux_table.create (num_ssa_names + 1);
4188 /* The create call doesn't actually grow the vector to the right size,
4189 it just preallocates the space to do so. */
4190 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4191 gcc_obstack_init (&vn_ssa_aux_obstack);
4193 shared_lookup_phiargs.create (0);
4194 shared_lookup_references.create (0);
4195 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4196 rpo_numbers_temp =
4197 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4198 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4200 /* RPO_NUMBERS_TEMP is indexed by RPO position: rpo[i] = bb means that
4201 the i'th block in RPO order is bb. We want the inverse mapping, from
4202 block index to RPO number, so we rearrange this array. */
4203 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4204 rpo_numbers[rpo_numbers_temp[j]] = j;
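/* For example (an illustrative permutation only), if rpo_numbers_temp
   were {5, 3, 4}, block 5 comes first in RPO order, so the loop sets
   rpo_numbers[5] = 0, rpo_numbers[3] = 1 and rpo_numbers[4] = 2. */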
4206 XDELETE (rpo_numbers_temp);
4208 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4210 /* Create the VN_INFO structures, and initialize value numbers to
4211 TOP. */
4212 for (i = 0; i < num_ssa_names; i++)
4213 {
4214 tree name = ssa_name (i);
4215 if (name)
4216 {
4217 VN_INFO_GET (name)->valnum = VN_TOP;
4218 VN_INFO (name)->expr = NULL_TREE;
4219 VN_INFO (name)->value_id = 0;
4220 }
4221 }
4223 renumber_gimple_stmt_uids ();
4225 /* Create the valid and optimistic value numbering tables. */
4226 valid_info = XCNEW (struct vn_tables_s);
4227 allocate_vn_table (valid_info);
4228 optimistic_info = XCNEW (struct vn_tables_s);
4229 allocate_vn_table (optimistic_info);
4230 }
4232 void
4233 free_scc_vn (void)
4234 {
4235 size_t i;
4237 delete constant_to_value_id;
4238 constant_to_value_id = NULL;
4239 BITMAP_FREE (constant_value_ids);
4240 shared_lookup_phiargs.release ();
4241 shared_lookup_references.release ();
4242 XDELETEVEC (rpo_numbers);
4244 for (i = 0; i < num_ssa_names; i++)
4245 {
4246 tree name = ssa_name (i);
4247 if (name
4248 && VN_INFO (name)->needs_insertion)
4249 release_ssa_name (name);
4250 }
4251 obstack_free (&vn_ssa_aux_obstack, NULL);
4252 vn_ssa_aux_table.release ();
4254 sccstack.release ();
4255 free_vn_table (valid_info);
4256 XDELETE (valid_info);
4257 free_vn_table (optimistic_info);
4258 XDELETE (optimistic_info);
4259 }
4261 /* Set *ID according to RESULT. */
4263 static void
4264 set_value_id_for_result (tree result, unsigned int *id)
4265 {
4266 if (result && TREE_CODE (result) == SSA_NAME)
4267 *id = VN_INFO (result)->value_id;
4268 else if (result && is_gimple_min_invariant (result))
4269 *id = get_or_alloc_constant_value_id (result);
4270 else
4271 *id = get_next_value_id ();
4272 }
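/* E.g. (hypothetical values) a result of x_3 inherits
   VN_INFO (x_3)->value_id, a constant result such as 42 shares the
   constant value id allocated for 42, and anything else gets a fresh
   value id. */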
4274 /* Set the value ids in the valid hash tables. */
4276 static void
4277 set_hashtable_value_ids (void)
4278 {
4279 vn_nary_op_iterator_type hin;
4280 vn_phi_iterator_type hip;
4281 vn_reference_iterator_type hir;
4282 vn_nary_op_t vno;
4283 vn_reference_t vr;
4284 vn_phi_t vp;
4286 /* Now set the value ids of the things we had put in the hash
4287 table. */
4289 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4290 set_value_id_for_result (vno->result, &vno->value_id);
4292 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4293 set_value_id_for_result (vp->result, &vp->value_id);
4295 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4296 hir)
4297 set_value_id_for_result (vr->result, &vr->value_id);
4298 }
4300 class cond_dom_walker : public dom_walker
4301 {
4302 public:
4303 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4305 virtual void before_dom_children (basic_block);
4307 bool fail;
4308 };
4310 void
4311 cond_dom_walker::before_dom_children (basic_block bb)
4312 {
4313 edge e;
4314 edge_iterator ei;
4316 if (fail)
4317 return;
4319 /* If any of the predecessor edges that do not come from blocks dominated
4320 by us are still marked as possibly executable, consider this block
4321 reachable. */
4322 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4323 FOR_EACH_EDGE (e, ei, bb->preds)
4324 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4325 reachable |= (e->flags & EDGE_EXECUTABLE);
4327 /* If the block is not reachable all outgoing edges are not
4328 executable. */
4329 if (!reachable)
4330 {
4331 if (dump_file && (dump_flags & TDF_DETAILS))
4332 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4333 "BB %d as not executable\n", bb->index);
4335 FOR_EACH_EDGE (e, ei, bb->succs)
4336 e->flags &= ~EDGE_EXECUTABLE;
4337 return;
4338 }
4340 gimple stmt = last_stmt (bb);
4341 if (!stmt)
4342 return;
4344 enum gimple_code code = gimple_code (stmt);
4345 if (code != GIMPLE_COND
4346 && code != GIMPLE_SWITCH
4347 && code != GIMPLE_GOTO)
4348 return;
4350 if (dump_file && (dump_flags & TDF_DETAILS))
4351 {
4352 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4353 bb->index);
4354 print_gimple_stmt (dump_file, stmt, 0, 0);
4355 }
4357 /* Value-number the last stmt's SSA uses. */
4358 ssa_op_iter i;
4359 tree op;
4360 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4361 if (VN_INFO (op)->visited == false
4362 && !DFS (op))
4363 {
4364 fail = true;
4365 return;
4366 }
4368 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4369 if value-numbering can prove they are not reachable. Handling
4370 computed gotos is also possible. */
4371 tree val;
4372 switch (code)
4373 {
4374 case GIMPLE_COND:
4375 {
4376 tree lhs = gimple_cond_lhs (stmt);
4377 tree rhs = gimple_cond_rhs (stmt);
4378 /* Work hard in computing the condition and take into account
4379 the valueization of the defining stmt. */
4380 if (TREE_CODE (lhs) == SSA_NAME)
4381 lhs = vn_get_expr_for (lhs);
4382 if (TREE_CODE (rhs) == SSA_NAME)
4383 rhs = vn_get_expr_for (rhs);
4384 val = fold_binary (gimple_cond_code (stmt),
4385 boolean_type_node, lhs, rhs);
4386 break;
4387 }
4388 case GIMPLE_SWITCH:
4389 val = gimple_switch_index (as_a <gswitch *> (stmt));
4390 break;
4391 case GIMPLE_GOTO:
4392 val = gimple_goto_dest (stmt);
4393 break;
4394 default:
4395 gcc_unreachable ();
4396 }
4397 if (!val)
4398 return;
4400 edge taken = find_taken_edge (bb, vn_valueize (val));
4401 if (!taken)
4402 return;
4404 if (dump_file && (dump_flags & TDF_DETAILS))
4405 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4406 "not executable\n", bb->index, bb->index, taken->dest->index);
4408 FOR_EACH_EDGE (e, ei, bb->succs)
4409 if (e != taken)
4410 e->flags &= ~EDGE_EXECUTABLE;
4411 }
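/* As an illustration (hypothetical names): if a block ends in

     if (i_1 != 0)

   and value numbering shows i_1 to be the constant 0, the condition
   folds to false, find_taken_edge returns the false edge and the
   true edge loses its EDGE_EXECUTABLE flag, so the code behind it is
   treated as unreachable by the reachability check above. */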
4413 /* Do SCCVN. Returns true if it finished, false if we bailed out
4414 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4415 how we use the alias oracle walking during the VN process. */
4417 bool
4418 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4419 {
4420 basic_block bb;
4421 size_t i;
4422 tree param;
4424 default_vn_walk_kind = default_vn_walk_kind_;
4426 init_scc_vn ();
4427 current_info = valid_info;
4429 for (param = DECL_ARGUMENTS (current_function_decl);
4430 param;
4431 param = DECL_CHAIN (param))
4432 {
4433 tree def = ssa_default_def (cfun, param);
4434 if (def)
4435 {
4436 VN_INFO (def)->visited = true;
4437 VN_INFO (def)->valnum = def;
4438 }
4439 }
4441 /* Mark all edges as possibly executable. */
4442 FOR_ALL_BB_FN (bb, cfun)
4443 {
4444 edge_iterator ei;
4445 edge e;
4446 FOR_EACH_EDGE (e, ei, bb->succs)
4447 e->flags |= EDGE_EXECUTABLE;
4448 }
4450 /* Walk all blocks in dominator order, value-numbering the last stmt's
4451 SSA uses and deciding whether outgoing edges are not executable. */
4452 cond_dom_walker walker;
4453 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4454 if (walker.fail)
4455 {
4456 free_scc_vn ();
4457 return false;
4458 }
4460 /* Value-number remaining SSA names. */
4461 for (i = 1; i < num_ssa_names; ++i)
4462 {
4463 tree name = ssa_name (i);
4464 if (name
4465 && VN_INFO (name)->visited == false
4466 && !has_zero_uses (name))
4467 if (!DFS (name))
4468 {
4469 free_scc_vn ();
4470 return false;
4471 }
4472 }
4474 /* Initialize the value ids. */
4476 for (i = 1; i < num_ssa_names; ++i)
4477 {
4478 tree name = ssa_name (i);
4479 vn_ssa_aux_t info;
4480 if (!name)
4481 continue;
4482 info = VN_INFO (name);
4483 if (info->valnum == name
4484 || info->valnum == VN_TOP)
4485 info->value_id = get_next_value_id ();
4486 else if (is_gimple_min_invariant (info->valnum))
4487 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4488 }
4490 /* Propagate. */
4491 for (i = 1; i < num_ssa_names; ++i)
4492 {
4493 tree name = ssa_name (i);
4494 vn_ssa_aux_t info;
4495 if (!name)
4496 continue;
4497 info = VN_INFO (name);
4498 if (TREE_CODE (info->valnum) == SSA_NAME
4499 && info->valnum != name
4500 && info->value_id != VN_INFO (info->valnum)->value_id)
4501 info->value_id = VN_INFO (info->valnum)->value_id;
4502 }
4504 set_hashtable_value_ids ();
4506 if (dump_file && (dump_flags & TDF_DETAILS))
4507 {
4508 fprintf (dump_file, "Value numbers:\n");
4509 for (i = 0; i < num_ssa_names; i++)
4510 {
4511 tree name = ssa_name (i);
4512 if (name
4513 && VN_INFO (name)->visited
4514 && SSA_VAL (name) != name)
4515 {
4516 print_generic_expr (dump_file, name, 0);
4517 fprintf (dump_file, " = ");
4518 print_generic_expr (dump_file, SSA_VAL (name), 0);
4519 fprintf (dump_file, "\n");
4520 }
4521 }
4522 }
4524 return true;
4525 }
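/* A minimal usage sketch, not a verbatim caller: a client pass would
   be expected to do roughly

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;
     ... query SSA_VAL, the vn_*_lookup routines and value ids ...
     free_scc_vn ();

   where the walk kind passed depends on how the caller wants the
   alias oracle used (see vn_lookup_kind). */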
4527 /* Return the maximum value id we have ever seen. */
4529 unsigned int
4530 get_max_value_id (void)
4531 {
4532 return next_value_id;
4533 }
4535 /* Return the next unique value id. */
4537 unsigned int
4538 get_next_value_id (void)
4539 {
4540 return next_value_id++;
4541 }
4544 /* Compare two expressions E1 and E2 and return true if they are equal. */
4546 bool
4547 expressions_equal_p (tree e1, tree e2)
4548 {
4549 /* The obvious case. */
4550 if (e1 == e2)
4551 return true;
4553 /* If only one of them is null, they cannot be equal. */
4554 if (!e1 || !e2)
4555 return false;
4557 /* Now perform the actual comparison. */
4558 if (TREE_CODE (e1) == TREE_CODE (e2)
4559 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4560 return true;
4562 return false;
4563 }
4566 /* Return true if the nary operation NARY may trap. This is a copy
4567 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4569 bool
4570 vn_nary_may_trap (vn_nary_op_t nary)
4571 {
4572 tree type;
4573 tree rhs2 = NULL_TREE;
4574 bool honor_nans = false;
4575 bool honor_snans = false;
4576 bool fp_operation = false;
4577 bool honor_trapv = false;
4578 bool handled, ret;
4579 unsigned i;
4581 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4582 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4583 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4584 {
4585 type = nary->type;
4586 fp_operation = FLOAT_TYPE_P (type);
4587 if (fp_operation)
4588 {
4589 honor_nans = flag_trapping_math && !flag_finite_math_only;
4590 honor_snans = flag_signaling_nans != 0;
4591 }
4592 else if (INTEGRAL_TYPE_P (type)
4593 && TYPE_OVERFLOW_TRAPS (type))
4594 honor_trapv = true;
4595 }
4596 if (nary->length >= 2)
4597 rhs2 = nary->op[1];
4598 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4599 honor_trapv,
4600 honor_nans, honor_snans, rhs2,
4601 &handled);
4602 if (handled
4603 && ret)
4604 return true;
4606 for (i = 0; i < nary->length; ++i)
4607 if (tree_could_trap_p (nary->op[i]))
4608 return true;
4610 return false;
4611 }
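/* Illustrative cases: a TRUNC_DIV_EXPR whose divisor is not known to
   be nonzero may trap; a signed PLUS_EXPR traps when -ftrapv makes
   overflow trapping (TYPE_OVERFLOW_TRAPS); a BIT_AND_EXPR on
   unsigned operands does not. */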