gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stor-layout.h"
30 #include "predict.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "cfganal.h"
36 #include "basic-block.h"
37 #include "gimple-pretty-print.h"
38 #include "tree-inline.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimple-expr.h"
44 #include "gimple.h"
45 #include "gimplify.h"
46 #include "gimple-ssa.h"
47 #include "tree-phinodes.h"
48 #include "ssa-iterators.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "rtl.h"
52 #include "flags.h"
53 #include "insn-config.h"
54 #include "expmed.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "calls.h"
58 #include "emit-rtl.h"
59 #include "varasm.h"
60 #include "stmt.h"
61 #include "expr.h"
62 #include "tree-dfa.h"
63 #include "tree-ssa.h"
64 #include "dumpfile.h"
65 #include "alloc-pool.h"
66 #include "cfgloop.h"
67 #include "params.h"
68 #include "tree-ssa-propagate.h"
69 #include "tree-ssa-sccvn.h"
70 #include "tree-cfg.h"
71 #include "domwalk.h"
72 #include "ipa-ref.h"
73 #include "plugin-api.h"
74 #include "cgraph.h"
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
124 1. We can iterate only the changing portions of the SCCs, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
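/* Illustrative sketch (not part of this pass): the core hash-based
   value-numbering step described above for straight-line code.  Two
   expressions with the same opcode and the same value numbers for
   their operands receive the same value number.  All sketch_* names
   are hypothetical and exist only for exposition; the real tables are
   GCC hash_tables and the real keys are trees.  */

struct sketch_expr { int opcode; int vn_op0; int vn_op1; };

#define SKETCH_TABLE_SIZE 64

static struct { struct sketch_expr key; int vn; int used; }
  sketch_vn_table[SKETCH_TABLE_SIZE];
static int sketch_next_vn = 1;

static unsigned
sketch_expr_hash (const struct sketch_expr *e)
{
  return (unsigned) e->opcode * 31u
	 + (unsigned) e->vn_op0 * 7u
	 + (unsigned) e->vn_op1;
}

/* Look E up; if it is not present, assign a fresh value number and
   remember it.  Open addressing keeps the sketch short (it assumes
   the table never fills up).  */

static int
sketch_lookup_or_add (struct sketch_expr e)
{
  unsigned i = sketch_expr_hash (&e) % SKETCH_TABLE_SIZE;
  while (sketch_vn_table[i].used)
    {
      if (sketch_vn_table[i].key.opcode == e.opcode
	  && sketch_vn_table[i].key.vn_op0 == e.vn_op0
	  && sketch_vn_table[i].key.vn_op1 == e.vn_op1)
	return sketch_vn_table[i].vn;
      i = (i + 1) % SKETCH_TABLE_SIZE;
    }
  sketch_vn_table[i].key = e;
  sketch_vn_table[i].vn = sketch_next_vn++;
  sketch_vn_table[i].used = 1;
  return sketch_vn_table[i].vn;
}

/* For code with cycles (alternative 2 above) the same lookup runs
   against a separate optimistic table and the members of an SCC are
   re-processed until none of their value numbers change.  */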
134 static tree *last_vuse_ptr;
135 static vn_lookup_kind vn_walk_kind;
136 static vn_lookup_kind default_vn_walk_kind;
138 /* vn_nary_op hashtable helpers. */
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 /* Return the computed hashcode for nary operation P1. */
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
152 return vno1->hashcode;
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
161 return vn_nary_op_eq (vno1, vno2);
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
168 /* vn_phi hashtable helpers. */
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
173 struct vn_phi_hasher : pointer_hash <vn_phi_s>
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 static inline void remove (vn_phi_s *);
180 /* Return the computed hashcode for phi operation P1. */
182 inline hashval_t
183 vn_phi_hasher::hash (const vn_phi_s *vp1)
185 return vp1->hashcode;
188 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
190 inline bool
191 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
193 return vn_phi_eq (vp1, vp2);
196 /* Free a phi operation structure VP. */
198 inline void
199 vn_phi_hasher::remove (vn_phi_s *phi)
201 phi->phiargs.release ();
204 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
205 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
208 /* Compare two reference operands P1 and P2 for equality. Return true if
209 they are equal, and false otherwise. */
211 static int
212 vn_reference_op_eq (const void *p1, const void *p2)
214 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
215 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
217 return (vro1->opcode == vro2->opcode
218 /* We do not care for differences in type qualification. */
219 && (vro1->type == vro2->type
220 || (vro1->type && vro2->type
221 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
222 TYPE_MAIN_VARIANT (vro2->type))))
223 && expressions_equal_p (vro1->op0, vro2->op0)
224 && expressions_equal_p (vro1->op1, vro2->op1)
225 && expressions_equal_p (vro1->op2, vro2->op2));
228 /* Free a reference operation structure VP. */
230 static inline void
231 free_reference (vn_reference_s *vr)
233 vr->operands.release ();
237 /* vn_reference hashtable helpers. */
239 struct vn_reference_hasher : pointer_hash <vn_reference_s>
241 static inline hashval_t hash (const vn_reference_s *);
242 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
243 static inline void remove (vn_reference_s *);
246 /* Return the hashcode for a given reference operation P1. */
248 inline hashval_t
249 vn_reference_hasher::hash (const vn_reference_s *vr1)
251 return vr1->hashcode;
254 inline bool
255 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
257 return vn_reference_eq (v, c);
260 inline void
261 vn_reference_hasher::remove (vn_reference_s *v)
263 free_reference (v);
266 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
267 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
270 /* The set of hashtables and alloc_pool's for their items. */
272 typedef struct vn_tables_s
274 vn_nary_op_table_type *nary;
275 vn_phi_table_type *phis;
276 vn_reference_table_type *references;
277 struct obstack nary_obstack;
278 pool_allocator<vn_phi_s> *phis_pool;
279 pool_allocator<vn_reference_s> *references_pool;
280 } *vn_tables_t;
283 /* vn_constant hashtable helpers. */
285 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
287 static inline hashval_t hash (const vn_constant_s *);
288 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
291 /* Hash table hash function for vn_constant_t. */
293 inline hashval_t
294 vn_constant_hasher::hash (const vn_constant_s *vc1)
296 return vc1->hashcode;
299 /* Hash table equality function for vn_constant_t. */
301 inline bool
302 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
304 if (vc1->hashcode != vc2->hashcode)
305 return false;
307 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
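/* Illustrative sketch (not part of this pass): the pattern the hasher
   above follows -- the hash value is computed once, stored in the
   entry, and compared before the more expensive structural equality
   test.  The sketch_* names are hypothetical.  */

struct sketch_key { int a; int b; };
struct sketch_entry { unsigned hashcode; struct sketch_key key; };

/* The hashcode stored in an entry is sketch_key_hash of its key,
   computed once at insertion time.  */

static unsigned
sketch_key_hash (struct sketch_key k)
{
  return (unsigned) k.a * 31u + (unsigned) k.b;
}

static int
sketch_entry_equal (const struct sketch_entry *e1,
		    const struct sketch_entry *e2)
{
  /* Cheap reject on the cached hashcode first, exactly as
     vn_constant_hasher::equal does above.  */
  if (e1->hashcode != e2->hashcode)
    return 0;
  return e1->key.a == e2->key.a && e1->key.b == e2->key.b;
}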
310 static hash_table<vn_constant_hasher> *constant_to_value_id;
311 static bitmap constant_value_ids;
314 /* Valid hashtables storing information we have proven to be
315 correct. */
317 static vn_tables_t valid_info;
319 /* Optimistic hashtables storing information we are making assumptions about
320 during iterations. */
322 static vn_tables_t optimistic_info;
324 /* Pointer to the set of hashtables that is currently being used.
325 Should always point to either the optimistic_info, or the
326 valid_info. */
328 static vn_tables_t current_info;
331 /* Reverse post order index for each basic block. */
333 static int *rpo_numbers;
335 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
337 /* Return the SSA value of the VUSE x, supporting released VDEFs
338 during elimination which will value-number the VDEF to the
339 associated VUSE (but not substitute in the whole lattice). */
341 static inline tree
342 vuse_ssa_val (tree x)
344 if (!x)
345 return NULL_TREE;
349 x = SSA_VAL (x);
351 while (SSA_NAME_IN_FREE_LIST (x));
353 return x;
356 /* This represents the top of the VN lattice, which is the universal
357 value. */
359 tree VN_TOP;
361 /* Unique counter for our value ids. */
363 static unsigned int next_value_id;
365 /* Next DFS number and the stack for strongly connected component
366 detection. */
368 static unsigned int next_dfs_num;
369 static vec<tree> sccstack;
373 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
374 are allocated on an obstack for locality reasons, and to free them
375 without looping over the vec. */
377 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
378 static struct obstack vn_ssa_aux_obstack;
380 /* Return the value numbering information for a given SSA name. */
382 vn_ssa_aux_t
383 VN_INFO (tree name)
385 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
386 gcc_checking_assert (res);
387 return res;
390 /* Set the value numbering info for a given SSA name to a given
391 value. */
393 static inline void
394 VN_INFO_SET (tree name, vn_ssa_aux_t value)
396 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
399 /* Initialize the value numbering info for a given SSA name.
400 This should be called just once for every SSA name. */
402 vn_ssa_aux_t
403 VN_INFO_GET (tree name)
405 vn_ssa_aux_t newinfo;
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
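/* Illustrative sketch (not part of this pass): the side-table pattern
   VN_INFO/VN_INFO_GET rely on -- one aux record per SSA version,
   found again by indexing with the version number and created on
   first use.  The real records live on an obstack for locality;
   malloc keeps the sketch short, and allocation failures are ignored
   for brevity.  sketch_* names are hypothetical.  */

#include <stdlib.h>
#include <string.h>

struct sketch_aux { int valnum; int visited; };

static struct sketch_aux **sketch_aux_table;
static unsigned sketch_aux_len;

static struct sketch_aux *
sketch_aux_get (unsigned version)
{
  if (version >= sketch_aux_len)
    {
      unsigned new_len = version + 1;
      sketch_aux_table = (struct sketch_aux **)
	realloc (sketch_aux_table, new_len * sizeof (struct sketch_aux *));
      memset (sketch_aux_table + sketch_aux_len, 0,
	      (new_len - sketch_aux_len) * sizeof (struct sketch_aux *));
      sketch_aux_len = new_len;
    }
  if (!sketch_aux_table[version])
    sketch_aux_table[version]
      = (struct sketch_aux *) calloc (1, sizeof (struct sketch_aux));
  return sketch_aux_table[version];
}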
416 /* Get the representative expression for the SSA_NAME NAME. Returns
417 the representative SSA_NAME if there is no expression associated with it. */
419 tree
420 vn_get_expr_for (tree name)
422 vn_ssa_aux_t vn = VN_INFO (name);
423 gimple def_stmt;
424 tree expr = NULL_TREE;
425 enum tree_code code;
427 if (vn->valnum == VN_TOP)
428 return name;
430 /* If the value-number is a constant it is the representative
431 expression. */
432 if (TREE_CODE (vn->valnum) != SSA_NAME)
433 return vn->valnum;
435 /* Get to the information of the value of this SSA_NAME. */
436 vn = VN_INFO (vn->valnum);
438 /* If the value-number is a constant it is the representative
439 expression. */
440 if (TREE_CODE (vn->valnum) != SSA_NAME)
441 return vn->valnum;
443 /* Else if we have an expression, return it. */
444 if (vn->expr != NULL_TREE)
445 return vn->expr;
447 /* Otherwise use the defining statement to build the expression. */
448 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
450 /* If the value number is not an assignment use it directly. */
451 if (!is_gimple_assign (def_stmt))
452 return vn->valnum;
454 /* Note that we can valueize here because we clear the cached
455 simplified expressions after each optimistic iteration. */
456 code = gimple_assign_rhs_code (def_stmt);
457 switch (TREE_CODE_CLASS (code))
459 case tcc_reference:
460 if ((code == REALPART_EXPR
461 || code == IMAGPART_EXPR
462 || code == VIEW_CONVERT_EXPR)
463 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
464 0)) == SSA_NAME)
465 expr = fold_build1 (code,
466 gimple_expr_type (def_stmt),
467 vn_valueize (TREE_OPERAND
468 (gimple_assign_rhs1 (def_stmt), 0)));
469 break;
471 case tcc_unary:
472 expr = fold_build1 (code,
473 gimple_expr_type (def_stmt),
474 vn_valueize (gimple_assign_rhs1 (def_stmt)));
475 break;
477 case tcc_binary:
478 expr = fold_build2 (code,
479 gimple_expr_type (def_stmt),
480 vn_valueize (gimple_assign_rhs1 (def_stmt)),
481 vn_valueize (gimple_assign_rhs2 (def_stmt)));
482 break;
484 case tcc_exceptional:
485 if (code == CONSTRUCTOR
486 && TREE_CODE
487 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
488 expr = gimple_assign_rhs1 (def_stmt);
489 break;
491 default:;
493 if (expr == NULL_TREE)
494 return vn->valnum;
496 /* Cache the expression. */
497 vn->expr = expr;
499 return expr;
502 /* Return the vn_kind the expression computed by the stmt should be
503 associated with. */
505 enum vn_kind
506 vn_get_stmt_kind (gimple stmt)
508 switch (gimple_code (stmt))
510 case GIMPLE_CALL:
511 return VN_REFERENCE;
512 case GIMPLE_PHI:
513 return VN_PHI;
514 case GIMPLE_ASSIGN:
516 enum tree_code code = gimple_assign_rhs_code (stmt);
517 tree rhs1 = gimple_assign_rhs1 (stmt);
518 switch (get_gimple_rhs_class (code))
520 case GIMPLE_UNARY_RHS:
521 case GIMPLE_BINARY_RHS:
522 case GIMPLE_TERNARY_RHS:
523 return VN_NARY;
524 case GIMPLE_SINGLE_RHS:
525 switch (TREE_CODE_CLASS (code))
527 case tcc_reference:
528 /* VOP-less references can go through the unary case. */
529 if ((code == REALPART_EXPR
530 || code == IMAGPART_EXPR
531 || code == VIEW_CONVERT_EXPR
532 || code == BIT_FIELD_REF)
533 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
534 return VN_NARY;
536 /* Fallthrough. */
537 case tcc_declaration:
538 return VN_REFERENCE;
540 case tcc_constant:
541 return VN_CONSTANT;
543 default:
544 if (code == ADDR_EXPR)
545 return (is_gimple_min_invariant (rhs1)
546 ? VN_CONSTANT : VN_REFERENCE);
547 else if (code == CONSTRUCTOR)
548 return VN_NARY;
549 return VN_NONE;
551 default:
552 return VN_NONE;
555 default:
556 return VN_NONE;
560 /* Lookup a value id for CONSTANT and return it. If it does not
561 exist returns 0. */
563 unsigned int
564 get_constant_value_id (tree constant)
566 vn_constant_s **slot;
567 struct vn_constant_s vc;
569 vc.hashcode = vn_hash_constant_with_type (constant);
570 vc.constant = constant;
571 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
572 if (slot)
573 return (*slot)->value_id;
574 return 0;
577 /* Lookup a value id for CONSTANT, and if it does not exist, create a
578 new one and return it. If it does exist, return it. */
580 unsigned int
581 get_or_alloc_constant_value_id (tree constant)
583 vn_constant_s **slot;
584 struct vn_constant_s vc;
585 vn_constant_t vcp;
587 vc.hashcode = vn_hash_constant_with_type (constant);
588 vc.constant = constant;
589 slot = constant_to_value_id->find_slot (&vc, INSERT);
590 if (*slot)
591 return (*slot)->value_id;
593 vcp = XNEW (struct vn_constant_s);
594 vcp->hashcode = vc.hashcode;
595 vcp->constant = constant;
596 vcp->value_id = get_next_value_id ();
597 *slot = vcp;
598 bitmap_set_bit (constant_value_ids, vcp->value_id);
599 return vcp->value_id;
602 /* Return true if V is a value id for a constant. */
604 bool
605 value_id_constant_p (unsigned int v)
607 return bitmap_bit_p (constant_value_ids, v);
610 /* Compute the hash for a reference operand VRO1. */
612 static void
613 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
615 hstate.add_int (vro1->opcode);
616 if (vro1->op0)
617 inchash::add_expr (vro1->op0, hstate);
618 if (vro1->op1)
619 inchash::add_expr (vro1->op1, hstate);
620 if (vro1->op2)
621 inchash::add_expr (vro1->op2, hstate);
624 /* Compute a hash for the reference operation VR1 and return it. */
626 static hashval_t
627 vn_reference_compute_hash (const vn_reference_t vr1)
629 inchash::hash hstate;
630 hashval_t result;
631 int i;
632 vn_reference_op_t vro;
633 HOST_WIDE_INT off = -1;
634 bool deref = false;
636 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
638 if (vro->opcode == MEM_REF)
639 deref = true;
640 else if (vro->opcode != ADDR_EXPR)
641 deref = false;
642 if (vro->off != -1)
644 if (off == -1)
645 off = 0;
646 off += vro->off;
648 else
650 if (off != -1
651 && off != 0)
652 hstate.add_int (off);
653 off = -1;
654 if (deref
655 && vro->opcode == ADDR_EXPR)
657 if (vro->op0)
659 tree op = TREE_OPERAND (vro->op0, 0);
660 hstate.add_int (TREE_CODE (op));
661 inchash::add_expr (op, hstate);
664 else
665 vn_reference_op_compute_hash (vro, hstate);
668 result = hstate.end ();
669 /* ??? We would ICE later if we hash instead of adding that in. */
670 if (vr1->vuse)
671 result += SSA_NAME_VERSION (vr1->vuse);
673 return result;
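/* Illustrative sketch (not part of this pass): how the hash above
   folds runs of known constant offsets into a single contribution,
   so that the same total offset spelled as different chains of
   components still hashes identically.  sketch_* names are
   hypothetical.  */

struct sketch_op { long off; int opcode; };  /* off == -1 means unknown.  */

static unsigned
sketch_hash_ops (const struct sketch_op *ops, int n)
{
  unsigned hash = 0;
  long acc = -1;	/* Accumulated known offset, -1 = none yet.  */
  for (int i = 0; i < n; i++)
    {
      if (ops[i].off != -1)
	{
	  /* Keep accumulating instead of hashing each piece.  */
	  if (acc == -1)
	    acc = 0;
	  acc += ops[i].off;
	}
      else
	{
	  /* Flush the accumulated offset once, then hash the op that
	     has no usable constant offset.  */
	  if (acc != -1 && acc != 0)
	    hash = hash * 31u + (unsigned) acc;
	  acc = -1;
	  hash = hash * 31u + (unsigned) ops[i].opcode;
	}
    }
  if (acc != -1 && acc != 0)
    hash = hash * 31u + (unsigned) acc;
  return hash;
}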
676 /* Return true if reference operations VR1 and VR2 are equivalent. This
677 means they have the same set of operands and vuses. */
679 bool
680 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
682 unsigned i, j;
684 /* Early out if this is not a hash collision. */
685 if (vr1->hashcode != vr2->hashcode)
686 return false;
688 /* The VOP needs to be the same. */
689 if (vr1->vuse != vr2->vuse)
690 return false;
692 /* If the operands are the same we are done. */
693 if (vr1->operands == vr2->operands)
694 return true;
696 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
697 return false;
699 if (INTEGRAL_TYPE_P (vr1->type)
700 && INTEGRAL_TYPE_P (vr2->type))
702 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
703 return false;
705 else if (INTEGRAL_TYPE_P (vr1->type)
706 && (TYPE_PRECISION (vr1->type)
707 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
708 return false;
709 else if (INTEGRAL_TYPE_P (vr2->type)
710 && (TYPE_PRECISION (vr2->type)
711 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
712 return false;
714 i = 0;
715 j = 0;
718 HOST_WIDE_INT off1 = 0, off2 = 0;
719 vn_reference_op_t vro1, vro2;
720 vn_reference_op_s tem1, tem2;
721 bool deref1 = false, deref2 = false;
722 for (; vr1->operands.iterate (i, &vro1); i++)
724 if (vro1->opcode == MEM_REF)
725 deref1 = true;
726 if (vro1->off == -1)
727 break;
728 off1 += vro1->off;
730 for (; vr2->operands.iterate (j, &vro2); j++)
732 if (vro2->opcode == MEM_REF)
733 deref2 = true;
734 if (vro2->off == -1)
735 break;
736 off2 += vro2->off;
738 if (off1 != off2)
739 return false;
740 if (deref1 && vro1->opcode == ADDR_EXPR)
742 memset (&tem1, 0, sizeof (tem1));
743 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
744 tem1.type = TREE_TYPE (tem1.op0);
745 tem1.opcode = TREE_CODE (tem1.op0);
746 vro1 = &tem1;
747 deref1 = false;
749 if (deref2 && vro2->opcode == ADDR_EXPR)
751 memset (&tem2, 0, sizeof (tem2));
752 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
753 tem2.type = TREE_TYPE (tem2.op0);
754 tem2.opcode = TREE_CODE (tem2.op0);
755 vro2 = &tem2;
756 deref2 = false;
758 if (deref1 != deref2)
759 return false;
760 if (!vn_reference_op_eq (vro1, vro2))
761 return false;
762 ++j;
763 ++i;
765 while (vr1->operands.length () != i
766 || vr2->operands.length () != j);
768 return true;
771 /* Copy the operations present in load/store REF into RESULT, a vector of
772 vn_reference_op_s's. */
774 static void
775 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
777 if (TREE_CODE (ref) == TARGET_MEM_REF)
779 vn_reference_op_s temp;
781 result->reserve (3);
783 memset (&temp, 0, sizeof (temp));
784 temp.type = TREE_TYPE (ref);
785 temp.opcode = TREE_CODE (ref);
786 temp.op0 = TMR_INDEX (ref);
787 temp.op1 = TMR_STEP (ref);
788 temp.op2 = TMR_OFFSET (ref);
789 temp.off = -1;
790 result->quick_push (temp);
792 memset (&temp, 0, sizeof (temp));
793 temp.type = NULL_TREE;
794 temp.opcode = ERROR_MARK;
795 temp.op0 = TMR_INDEX2 (ref);
796 temp.off = -1;
797 result->quick_push (temp);
799 memset (&temp, 0, sizeof (temp));
800 temp.type = NULL_TREE;
801 temp.opcode = TREE_CODE (TMR_BASE (ref));
802 temp.op0 = TMR_BASE (ref);
803 temp.off = -1;
804 result->quick_push (temp);
805 return;
808 /* For non-calls, store the information that makes up the address. */
809 tree orig = ref;
810 while (ref)
812 vn_reference_op_s temp;
814 memset (&temp, 0, sizeof (temp));
815 temp.type = TREE_TYPE (ref);
816 temp.opcode = TREE_CODE (ref);
817 temp.off = -1;
819 switch (temp.opcode)
821 case MODIFY_EXPR:
822 temp.op0 = TREE_OPERAND (ref, 1);
823 break;
824 case WITH_SIZE_EXPR:
825 temp.op0 = TREE_OPERAND (ref, 1);
826 temp.off = 0;
827 break;
828 case MEM_REF:
829 /* The base address gets its own vn_reference_op_s structure. */
830 temp.op0 = TREE_OPERAND (ref, 1);
831 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
832 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
833 break;
834 case BIT_FIELD_REF:
835 /* Record bits and position. */
836 temp.op0 = TREE_OPERAND (ref, 1);
837 temp.op1 = TREE_OPERAND (ref, 2);
838 break;
839 case COMPONENT_REF:
840 /* The field decl is enough to unambiguously specify the field;
841 a matching type is not necessary, and a mismatching type
842 is always a spurious difference. */
843 temp.type = NULL_TREE;
844 temp.op0 = TREE_OPERAND (ref, 1);
845 temp.op1 = TREE_OPERAND (ref, 2);
847 tree this_offset = component_ref_field_offset (ref);
848 if (this_offset
849 && TREE_CODE (this_offset) == INTEGER_CST)
851 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
852 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
854 offset_int off
855 = (wi::to_offset (this_offset)
856 + wi::lrshift (wi::to_offset (bit_offset),
857 LOG2_BITS_PER_UNIT));
858 if (wi::fits_shwi_p (off)
859 /* Prohibit value-numbering zero offset components
860 of addresses the same before the pass folding
861 __builtin_object_size had a chance to run
862 (checking cfun->after_inlining does the
863 trick here). */
864 && (TREE_CODE (orig) != ADDR_EXPR
865 || off != 0
866 || cfun->after_inlining))
867 temp.off = off.to_shwi ();
871 break;
872 case ARRAY_RANGE_REF:
873 case ARRAY_REF:
874 /* Record index as operand. */
875 temp.op0 = TREE_OPERAND (ref, 1);
876 /* Always record lower bounds and element size. */
877 temp.op1 = array_ref_low_bound (ref);
878 temp.op2 = array_ref_element_size (ref);
879 if (TREE_CODE (temp.op0) == INTEGER_CST
880 && TREE_CODE (temp.op1) == INTEGER_CST
881 && TREE_CODE (temp.op2) == INTEGER_CST)
883 offset_int off = ((wi::to_offset (temp.op0)
884 - wi::to_offset (temp.op1))
885 * wi::to_offset (temp.op2));
886 if (wi::fits_shwi_p (off))
887 temp.off = off.to_shwi();
889 break;
890 case VAR_DECL:
891 if (DECL_HARD_REGISTER (ref))
893 temp.op0 = ref;
894 break;
896 /* Fallthru. */
897 case PARM_DECL:
898 case CONST_DECL:
899 case RESULT_DECL:
900 /* Canonicalize decls to MEM[&decl] which is what we end up with
901 when valueizing MEM[ptr] with ptr = &decl. */
902 temp.opcode = MEM_REF;
903 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
904 temp.off = 0;
905 result->safe_push (temp);
906 temp.opcode = ADDR_EXPR;
907 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
908 temp.type = TREE_TYPE (temp.op0);
909 temp.off = -1;
910 break;
911 case STRING_CST:
912 case INTEGER_CST:
913 case COMPLEX_CST:
914 case VECTOR_CST:
915 case REAL_CST:
916 case FIXED_CST:
917 case CONSTRUCTOR:
918 case SSA_NAME:
919 temp.op0 = ref;
920 break;
921 case ADDR_EXPR:
922 if (is_gimple_min_invariant (ref))
924 temp.op0 = ref;
925 break;
927 break;
928 /* These are only interesting for their operands, their
929 existence, and their type. They will never be the last
930 ref in the chain of references (i.e. they require an
931 operand), so we don't have to put anything
932 for op* as it will be handled by the iteration. */
933 case REALPART_EXPR:
934 case VIEW_CONVERT_EXPR:
935 temp.off = 0;
936 break;
937 case IMAGPART_EXPR:
938 /* This is only interesting for its constant offset. */
939 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
940 break;
941 default:
942 gcc_unreachable ();
944 result->safe_push (temp);
946 if (REFERENCE_CLASS_P (ref)
947 || TREE_CODE (ref) == MODIFY_EXPR
948 || TREE_CODE (ref) == WITH_SIZE_EXPR
949 || (TREE_CODE (ref) == ADDR_EXPR
950 && !is_gimple_min_invariant (ref)))
951 ref = TREE_OPERAND (ref, 0);
952 else
953 ref = NULL_TREE;
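/* Illustrative sketch (not part of this pass): the shape of the
   operand vector the walk above produces.  For a load from s.f, with
   s a plain VAR_DECL, the decl is canonicalized to MEM[&s] and three
   entries are pushed, outermost component first and the address of
   the base last.  sketch_* names are hypothetical; the real entries
   are vn_reference_op_s.  */

#include <stddef.h>

struct sketch_ref_op { const char *opcode; long off; };

struct sketch_s { long pad; int f; };

static int
sketch_ops_for_s_dot_f (struct sketch_ref_op ops[3])
{
  ops[0].opcode = "COMPONENT_REF";	/* Field f.  */
  ops[0].off = (long) offsetof (struct sketch_s, f);
  ops[1].opcode = "MEM_REF";		/* Decl canonicalized to MEM[&s].  */
  ops[1].off = 0;
  ops[2].opcode = "ADDR_EXPR";		/* &s; no meaningful constant offset.  */
  ops[2].off = -1;
  return 3;
}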
957 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
958 operands in *OPS, the reference alias set SET and the reference type TYPE.
959 Return true if something useful was produced. */
961 bool
962 ao_ref_init_from_vn_reference (ao_ref *ref,
963 alias_set_type set, tree type,
964 vec<vn_reference_op_s> ops)
966 vn_reference_op_t op;
967 unsigned i;
968 tree base = NULL_TREE;
969 tree *op0_p = &base;
970 HOST_WIDE_INT offset = 0;
971 HOST_WIDE_INT max_size;
972 HOST_WIDE_INT size = -1;
973 tree size_tree = NULL_TREE;
974 alias_set_type base_alias_set = -1;
976 /* First get the final access size from just the outermost expression. */
977 op = &ops[0];
978 if (op->opcode == COMPONENT_REF)
979 size_tree = DECL_SIZE (op->op0);
980 else if (op->opcode == BIT_FIELD_REF)
981 size_tree = op->op0;
982 else
984 machine_mode mode = TYPE_MODE (type);
985 if (mode == BLKmode)
986 size_tree = TYPE_SIZE (type);
987 else
988 size = GET_MODE_BITSIZE (mode);
990 if (size_tree != NULL_TREE)
992 if (!tree_fits_uhwi_p (size_tree))
993 size = -1;
994 else
995 size = tree_to_uhwi (size_tree);
998 /* Initially, maxsize is the same as the accessed element size.
999 In the following it will only grow (or become -1). */
1000 max_size = size;
1002 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1003 and find the ultimate containing object. */
1004 FOR_EACH_VEC_ELT (ops, i, op)
1006 switch (op->opcode)
1008 /* These may be in the reference ops, but we cannot do anything
1009 sensible with them here. */
1010 case ADDR_EXPR:
1011 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1012 if (base != NULL_TREE
1013 && TREE_CODE (base) == MEM_REF
1014 && op->op0
1015 && DECL_P (TREE_OPERAND (op->op0, 0)))
1017 vn_reference_op_t pop = &ops[i-1];
1018 base = TREE_OPERAND (op->op0, 0);
1019 if (pop->off == -1)
1021 max_size = -1;
1022 offset = 0;
1024 else
1025 offset += pop->off * BITS_PER_UNIT;
1026 op0_p = NULL;
1027 break;
1029 /* Fallthru. */
1030 case CALL_EXPR:
1031 return false;
1033 /* Record the base objects. */
1034 case MEM_REF:
1035 base_alias_set = get_deref_alias_set (op->op0);
1036 *op0_p = build2 (MEM_REF, op->type,
1037 NULL_TREE, op->op0);
1038 op0_p = &TREE_OPERAND (*op0_p, 0);
1039 break;
1041 case VAR_DECL:
1042 case PARM_DECL:
1043 case RESULT_DECL:
1044 case SSA_NAME:
1045 *op0_p = op->op0;
1046 op0_p = NULL;
1047 break;
1049 /* And now the usual component-reference style ops. */
1050 case BIT_FIELD_REF:
1051 offset += tree_to_shwi (op->op1);
1052 break;
1054 case COMPONENT_REF:
1056 tree field = op->op0;
1057 /* We do not have a complete COMPONENT_REF tree here so we
1058 cannot use component_ref_field_offset. Do the interesting
1059 parts manually. */
1061 if (op->op1
1062 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1063 max_size = -1;
1064 else
1066 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1067 * BITS_PER_UNIT);
1068 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1070 break;
1073 case ARRAY_RANGE_REF:
1074 case ARRAY_REF:
1075 /* We recorded the lower bound and the element size. */
1076 if (!tree_fits_shwi_p (op->op0)
1077 || !tree_fits_shwi_p (op->op1)
1078 || !tree_fits_shwi_p (op->op2))
1079 max_size = -1;
1080 else
1082 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1083 hindex -= tree_to_shwi (op->op1);
1084 hindex *= tree_to_shwi (op->op2);
1085 hindex *= BITS_PER_UNIT;
1086 offset += hindex;
1088 break;
1090 case REALPART_EXPR:
1091 break;
1093 case IMAGPART_EXPR:
1094 offset += size;
1095 break;
1097 case VIEW_CONVERT_EXPR:
1098 break;
1100 case STRING_CST:
1101 case INTEGER_CST:
1102 case COMPLEX_CST:
1103 case VECTOR_CST:
1104 case REAL_CST:
1105 case CONSTRUCTOR:
1106 case CONST_DECL:
1107 return false;
1109 default:
1110 return false;
1114 if (base == NULL_TREE)
1115 return false;
1117 ref->ref = NULL_TREE;
1118 ref->base = base;
1119 ref->offset = offset;
1120 ref->size = size;
1121 ref->max_size = max_size;
1122 ref->ref_alias_set = set;
1123 if (base_alias_set != -1)
1124 ref->base_alias_set = base_alias_set;
1125 else
1126 ref->base_alias_set = get_alias_set (base);
1127 /* We discount volatiles from value-numbering elsewhere. */
1128 ref->volatile_p = false;
1130 return true;
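/* Illustrative sketch (not part of this pass): the offset/max_size
   bookkeeping the walk above performs.  Known component offsets are
   summed in bits; as soon as one component has an unknown offset the
   maximum extent of the access becomes unknown (-1).  sketch_* names
   are hypothetical.  */

struct sketch_piece { long bit_off; int known; };

static void
sketch_accumulate_extent (const struct sketch_piece *pieces, int n,
			  long size, long *offset, long *max_size)
{
  *offset = 0;
  *max_size = size;	/* Starts as the accessed element size.  */
  for (int i = 0; i < n; i++)
    {
      if (!pieces[i].known)
	*max_size = -1;	/* Variable part: extent unknown.  */
      else
	*offset += pieces[i].bit_off;
    }
}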
1133 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1134 vn_reference_op_s's. */
1136 static void
1137 copy_reference_ops_from_call (gcall *call,
1138 vec<vn_reference_op_s> *result)
1140 vn_reference_op_s temp;
1141 unsigned i;
1142 tree lhs = gimple_call_lhs (call);
1143 int lr;
1145 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1146 different. By adding the lhs here in the vector, we ensure that the
1147 hashcode is different, guaranteeing a different value number. */
1148 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1150 memset (&temp, 0, sizeof (temp));
1151 temp.opcode = MODIFY_EXPR;
1152 temp.type = TREE_TYPE (lhs);
1153 temp.op0 = lhs;
1154 temp.off = -1;
1155 result->safe_push (temp);
1158 /* Copy the type, opcode, function, static chain and EH region, if any. */
1159 memset (&temp, 0, sizeof (temp));
1160 temp.type = gimple_call_return_type (call);
1161 temp.opcode = CALL_EXPR;
1162 temp.op0 = gimple_call_fn (call);
1163 temp.op1 = gimple_call_chain (call);
1164 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1165 temp.op2 = size_int (lr);
1166 temp.off = -1;
1167 if (gimple_call_with_bounds_p (call))
1168 temp.with_bounds = 1;
1169 result->safe_push (temp);
1171 /* Copy the call arguments. As they can be references as well,
1172 just chain them together. */
1173 for (i = 0; i < gimple_call_num_args (call); ++i)
1175 tree callarg = gimple_call_arg (call, i);
1176 copy_reference_ops_from_ref (callarg, result);
1180 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1181 *I_P to point to the last element of the replacement. */
1182 void
1183 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1184 unsigned int *i_p)
1186 unsigned int i = *i_p;
1187 vn_reference_op_t op = &(*ops)[i];
1188 vn_reference_op_t mem_op = &(*ops)[i - 1];
1189 tree addr_base;
1190 HOST_WIDE_INT addr_offset = 0;
1192 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1193 from .foo.bar to the preceding MEM_REF offset and replace the
1194 address with &OBJ. */
1195 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1196 &addr_offset);
1197 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1198 if (addr_base != TREE_OPERAND (op->op0, 0))
1200 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1201 off += addr_offset;
1202 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1203 op->op0 = build_fold_addr_expr (addr_base);
1204 if (tree_fits_shwi_p (mem_op->op0))
1205 mem_op->off = tree_to_shwi (mem_op->op0);
1206 else
1207 mem_op->off = -1;
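/* Illustrative sketch (not part of this pass): the arithmetic the
   function above performs.  A reference MEM[&obj.field, C] is
   rewritten to MEM[&obj, C + byte-offset-of-field], so differently
   spelled addresses of the same memory normalize to the same
   operands.  sketch_* names are hypothetical.  */

#include <stddef.h>

struct sketch_obj { long header; int field; };

/* Given the MEM_REF constant offset C of MEM[&obj.field, C], return
   the offset to use once the address is rewritten to &obj.  */

static long
sketch_folded_offset (long mem_ref_off)
{
  return mem_ref_off + (long) offsetof (struct sketch_obj, field);
}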
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static void
1214 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 gimple def_stmt;
1221 enum tree_code code;
1222 offset_int off;
1224 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1225 if (!is_gimple_assign (def_stmt))
1226 return;
1228 code = gimple_assign_rhs_code (def_stmt);
1229 if (code != ADDR_EXPR
1230 && code != POINTER_PLUS_EXPR)
1231 return;
1233 off = offset_int::from (mem_op->op0, SIGNED);
1235 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1236 from .foo.bar to the preceding MEM_REF offset and replace the
1237 address with &OBJ. */
1238 if (code == ADDR_EXPR)
1240 tree addr, addr_base;
1241 HOST_WIDE_INT addr_offset;
1243 addr = gimple_assign_rhs1 (def_stmt);
1244 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1245 &addr_offset);
1246 /* If that didn't work because the address isn't invariant, propagate
1247 the reference tree from the address operation in case the current
1248 dereference has no offset. */
1249 if (!addr_base
1250 && *i_p == ops->length () - 1
1251 && off == 0
1252 /* This makes us disable this transform for PRE where the
1253 reference ops might be also used for code insertion which
1254 is invalid. */
1255 && default_vn_walk_kind == VN_WALKREWRITE)
1257 auto_vec<vn_reference_op_s, 32> tem;
1258 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1259 ops->pop ();
1260 ops->pop ();
1261 ops->safe_splice (tem);
1262 --*i_p;
1263 return;
1265 if (!addr_base
1266 || TREE_CODE (addr_base) != MEM_REF)
1267 return;
1269 off += addr_offset;
1270 off += mem_ref_offset (addr_base);
1271 op->op0 = TREE_OPERAND (addr_base, 0);
1273 else
1275 tree ptr, ptroff;
1276 ptr = gimple_assign_rhs1 (def_stmt);
1277 ptroff = gimple_assign_rhs2 (def_stmt);
1278 if (TREE_CODE (ptr) != SSA_NAME
1279 || TREE_CODE (ptroff) != INTEGER_CST)
1280 return;
1282 off += wi::to_offset (ptroff);
1283 op->op0 = ptr;
1286 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1287 if (tree_fits_shwi_p (mem_op->op0))
1288 mem_op->off = tree_to_shwi (mem_op->op0);
1289 else
1290 mem_op->off = -1;
1291 if (TREE_CODE (op->op0) == SSA_NAME)
1292 op->op0 = SSA_VAL (op->op0);
1293 if (TREE_CODE (op->op0) != SSA_NAME)
1294 op->opcode = TREE_CODE (op->op0);
1296 /* And recurse. */
1297 if (TREE_CODE (op->op0) == SSA_NAME)
1298 vn_reference_maybe_forwprop_address (ops, i_p);
1299 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1300 vn_reference_fold_indirect (ops, i_p);
1303 /* Optimize the reference REF to a constant if possible or return
1304 NULL_TREE if not. */
1306 tree
1307 fully_constant_vn_reference_p (vn_reference_t ref)
1309 vec<vn_reference_op_s> operands = ref->operands;
1310 vn_reference_op_t op;
1312 /* Try to simplify the translated expression if it is
1313 a call to a builtin function with at most two arguments. */
1314 op = &operands[0];
1315 if (op->opcode == CALL_EXPR
1316 && TREE_CODE (op->op0) == ADDR_EXPR
1317 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1318 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1319 && operands.length () >= 2
1320 && operands.length () <= 3)
1322 vn_reference_op_t arg0, arg1 = NULL;
1323 bool anyconst = false;
1324 arg0 = &operands[1];
1325 if (operands.length () > 2)
1326 arg1 = &operands[2];
1327 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1328 || (arg0->opcode == ADDR_EXPR
1329 && is_gimple_min_invariant (arg0->op0)))
1330 anyconst = true;
1331 if (arg1
1332 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1333 || (arg1->opcode == ADDR_EXPR
1334 && is_gimple_min_invariant (arg1->op0))))
1335 anyconst = true;
1336 if (anyconst)
1338 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1339 arg1 ? 2 : 1,
1340 arg0->op0,
1341 arg1 ? arg1->op0 : NULL);
1342 if (folded
1343 && TREE_CODE (folded) == NOP_EXPR)
1344 folded = TREE_OPERAND (folded, 0);
1345 if (folded
1346 && is_gimple_min_invariant (folded))
1347 return folded;
1351 /* Simplify reads from constants or constant initializers. */
1352 else if (BITS_PER_UNIT == 8
1353 && is_gimple_reg_type (ref->type)
1354 && (!INTEGRAL_TYPE_P (ref->type)
1355 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1357 HOST_WIDE_INT off = 0;
1358 HOST_WIDE_INT size;
1359 if (INTEGRAL_TYPE_P (ref->type))
1360 size = TYPE_PRECISION (ref->type);
1361 else
1362 size = tree_to_shwi (TYPE_SIZE (ref->type));
1363 if (size % BITS_PER_UNIT != 0
1364 || size > MAX_BITSIZE_MODE_ANY_MODE)
1365 return NULL_TREE;
1366 size /= BITS_PER_UNIT;
1367 unsigned i;
1368 for (i = 0; i < operands.length (); ++i)
1370 if (operands[i].off == -1)
1371 return NULL_TREE;
1372 off += operands[i].off;
1373 if (operands[i].opcode == MEM_REF)
1375 ++i;
1376 break;
1379 vn_reference_op_t base = &operands[--i];
1380 tree ctor = error_mark_node;
1381 tree decl = NULL_TREE;
1382 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1383 ctor = base->op0;
1384 else if (base->opcode == MEM_REF
1385 && base[1].opcode == ADDR_EXPR
1386 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1387 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1389 decl = TREE_OPERAND (base[1].op0, 0);
1390 ctor = ctor_for_folding (decl);
1392 if (ctor == NULL_TREE)
1393 return build_zero_cst (ref->type);
1394 else if (ctor != error_mark_node)
1396 if (decl)
1398 tree res = fold_ctor_reference (ref->type, ctor,
1399 off * BITS_PER_UNIT,
1400 size * BITS_PER_UNIT, decl);
1401 if (res)
1403 STRIP_USELESS_TYPE_CONVERSION (res);
1404 if (is_gimple_min_invariant (res))
1405 return res;
1408 else
1410 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 if (native_encode_expr (ctor, buf, size, off) > 0)
1412 return native_interpret_expr (ref->type, buf, size);
1417 return NULL_TREE;
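/* Illustrative sketch (not part of this pass): the byte-level read
   the initializer branch above performs with native_encode_expr and
   native_interpret_expr.  The constant is rendered into a host byte
   buffer and the load is re-interpreted from the bytes at the
   constant offset.  sketch_* names are hypothetical.  */

#include <string.h>

static const unsigned char sketch_ctor_bytes[8]
  = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 };

/* Read an int-sized value that starts OFF bytes into the encoded
   initializer (assumes OFF + sizeof (int) stays within the buffer).  */

static int
sketch_read_from_ctor (long off)
{
  int val;
  memcpy (&val, sketch_ctor_bytes + off, sizeof (val));
  return val;
}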
1420 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1421 structures into their value numbers. This is done in-place, and
1422 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1423 whether any operands were valueized. */
1425 static vec<vn_reference_op_s>
1426 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1428 vn_reference_op_t vro;
1429 unsigned int i;
1431 *valueized_anything = false;
1433 FOR_EACH_VEC_ELT (orig, i, vro)
1435 if (vro->opcode == SSA_NAME
1436 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1438 tree tem = SSA_VAL (vro->op0);
1439 if (tem != vro->op0)
1441 *valueized_anything = true;
1442 vro->op0 = tem;
1444 /* If it transforms from an SSA_NAME to a constant, update
1445 the opcode. */
1446 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1447 vro->opcode = TREE_CODE (vro->op0);
1449 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1451 tree tem = SSA_VAL (vro->op1);
1452 if (tem != vro->op1)
1454 *valueized_anything = true;
1455 vro->op1 = tem;
1458 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1460 tree tem = SSA_VAL (vro->op2);
1461 if (tem != vro->op2)
1463 *valueized_anything = true;
1464 vro->op2 = tem;
1467 /* If it transforms from an SSA_NAME to an address, fold with
1468 a preceding indirect reference. */
1469 if (i > 0
1470 && vro->op0
1471 && TREE_CODE (vro->op0) == ADDR_EXPR
1472 && orig[i - 1].opcode == MEM_REF)
1473 vn_reference_fold_indirect (&orig, &i);
1474 else if (i > 0
1475 && vro->opcode == SSA_NAME
1476 && orig[i - 1].opcode == MEM_REF)
1477 vn_reference_maybe_forwprop_address (&orig, &i);
1478 /* If it transforms a non-constant ARRAY_REF into a constant
1479 one, adjust the constant offset. */
1480 else if (vro->opcode == ARRAY_REF
1481 && vro->off == -1
1482 && TREE_CODE (vro->op0) == INTEGER_CST
1483 && TREE_CODE (vro->op1) == INTEGER_CST
1484 && TREE_CODE (vro->op2) == INTEGER_CST)
1486 offset_int off = ((wi::to_offset (vro->op0)
1487 - wi::to_offset (vro->op1))
1488 * wi::to_offset (vro->op2));
1489 if (wi::fits_shwi_p (off))
1490 vro->off = off.to_shwi ();
1494 return orig;
1497 static vec<vn_reference_op_s>
1498 valueize_refs (vec<vn_reference_op_s> orig)
1500 bool tem;
1501 return valueize_refs_1 (orig, &tem);
1504 static vec<vn_reference_op_s> shared_lookup_references;
1506 /* Create a vector of vn_reference_op_s structures from REF, a
1507 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1508 this function. *VALUEIZED_ANYTHING will specify whether any
1509 operands were valueized. */
1511 static vec<vn_reference_op_s>
1512 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1514 if (!ref)
1515 return vNULL;
1516 shared_lookup_references.truncate (0);
1517 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1518 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1519 valueized_anything);
1520 return shared_lookup_references;
1523 /* Create a vector of vn_reference_op_s structures from CALL, a
1524 call statement. The vector is shared among all callers of
1525 this function. */
1527 static vec<vn_reference_op_s>
1528 valueize_shared_reference_ops_from_call (gcall *call)
1530 if (!call)
1531 return vNULL;
1532 shared_lookup_references.truncate (0);
1533 copy_reference_ops_from_call (call, &shared_lookup_references);
1534 shared_lookup_references = valueize_refs (shared_lookup_references);
1535 return shared_lookup_references;
1538 /* Lookup a SCCVN reference operation VR in the current hash table.
1539 Returns the resulting value number if it exists in the hash table,
1540 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1541 vn_reference_t stored in the hashtable if something is found. */
1543 static tree
1544 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1546 vn_reference_s **slot;
1547 hashval_t hash;
1549 hash = vr->hashcode;
1550 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1551 if (!slot && current_info == optimistic_info)
1552 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1553 if (slot)
1555 if (vnresult)
1556 *vnresult = (vn_reference_t)*slot;
1557 return ((vn_reference_t)*slot)->result;
1560 return NULL_TREE;
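/* Illustrative sketch (not part of this pass): the two-table probe
   the lookup above performs.  While iterating an SCC the optimistic
   table is the current one and the last fully validated table serves
   as a fallback, so facts proven in completed iterations remain
   usable.  sketch_* names are hypothetical.  */

struct sketch_vn_map { int keys[8]; int vals[8]; int n; };

static int
sketch_map_find (const struct sketch_vn_map *t, int key)
{
  for (int i = 0; i < t->n; i++)
    if (t->keys[i] == key)
      return t->vals[i];
  return 0;
}

static int
sketch_two_level_lookup (const struct sketch_vn_map *current,
			 const struct sketch_vn_map *optimistic,
			 const struct sketch_vn_map *valid,
			 int key)
{
  int result = sketch_map_find (current, key);
  if (!result && current == optimistic)
    /* Fall back to information already proven correct.  */
    result = sketch_map_find (valid, key);
  return result;
}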
1563 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1564 with the current VUSE and performs the expression lookup. */
1566 static void *
1567 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1568 unsigned int cnt, void *vr_)
1570 vn_reference_t vr = (vn_reference_t)vr_;
1571 vn_reference_s **slot;
1572 hashval_t hash;
1574 /* This bounds the stmt walks we perform on reference lookups
1575 to O(1) instead of O(N) where N is the number of dominating
1576 stores. */
1577 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1578 return (void *)-1;
1580 if (last_vuse_ptr)
1581 *last_vuse_ptr = vuse;
1583 /* Fixup vuse and hash. */
1584 if (vr->vuse)
1585 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1586 vr->vuse = vuse_ssa_val (vuse);
1587 if (vr->vuse)
1588 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1590 hash = vr->hashcode;
1591 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1592 if (!slot && current_info == optimistic_info)
1593 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1594 if (slot)
1595 return *slot;
1597 return NULL;
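/* Illustrative sketch (not part of this pass): why the hash fixup
   above is a plain subtract/add.  vn_reference_compute_hash mixes the
   VUSE in additively (result += SSA_NAME_VERSION (vuse)), so walking
   to a new VUSE only needs to back out the old version number and add
   the new one instead of rehashing all operands.  sketch_* names are
   hypothetical.  */

static unsigned
sketch_rehash_for_new_vuse (unsigned hashcode,
			    unsigned old_vuse_version,
			    unsigned new_vuse_version)
{
  hashcode -= old_vuse_version;	/* Remove the old additive term.  */
  hashcode += new_vuse_version;	/* And mix in the new VUSE.  */
  return hashcode;
}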
1600 /* Lookup an existing or insert a new vn_reference entry into the
1601 value table for the VUSE, SET, TYPE, OPERANDS reference which
1602 has the value VALUE which is either a constant or an SSA name. */
1604 static vn_reference_t
1605 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1606 alias_set_type set,
1607 tree type,
1608 vec<vn_reference_op_s,
1609 va_heap> operands,
1610 tree value)
1612 vn_reference_s vr1;
1613 vn_reference_t result;
1614 unsigned value_id;
1615 vr1.vuse = vuse;
1616 vr1.operands = operands;
1617 vr1.type = type;
1618 vr1.set = set;
1619 vr1.hashcode = vn_reference_compute_hash (&vr1);
1620 if (vn_reference_lookup_1 (&vr1, &result))
1621 return result;
1622 if (TREE_CODE (value) == SSA_NAME)
1623 value_id = VN_INFO (value)->value_id;
1624 else
1625 value_id = get_or_alloc_constant_value_id (value);
1626 return vn_reference_insert_pieces (vuse, set, type,
1627 operands.copy (), value, value_id);
1630 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1631 from the statement defining VUSE and if not successful tries to
1632 translate *REFP and VR_ through an aggregate copy at the definition
1633 of VUSE. */
1635 static void *
1636 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1637 bool disambiguate_only)
1639 vn_reference_t vr = (vn_reference_t)vr_;
1640 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1641 tree base;
1642 HOST_WIDE_INT offset, maxsize;
1643 static vec<vn_reference_op_s>
1644 lhs_ops = vNULL;
1645 ao_ref lhs_ref;
1646 bool lhs_ref_ok = false;
1648 /* First try to disambiguate after value-replacing in the definitions LHS. */
1649 if (is_gimple_assign (def_stmt))
1651 tree lhs = gimple_assign_lhs (def_stmt);
1652 bool valueized_anything = false;
1653 /* Avoid re-allocation overhead. */
1654 lhs_ops.truncate (0);
1655 copy_reference_ops_from_ref (lhs, &lhs_ops);
1656 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1657 if (valueized_anything)
1659 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1660 get_alias_set (lhs),
1661 TREE_TYPE (lhs), lhs_ops);
1662 if (lhs_ref_ok
1663 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1664 return NULL;
1666 else
1668 ao_ref_init (&lhs_ref, lhs);
1669 lhs_ref_ok = true;
1672 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1673 && gimple_call_num_args (def_stmt) <= 4)
1675 /* For builtin calls valueize its arguments and call the
1676 alias oracle again. Valueization may improve points-to
1677 info of pointers and constify size and position arguments.
1678 Originally this was motivated by PR61034 which has
1679 conditional calls to free falsely clobbering ref because
1680 of imprecise points-to info of the argument. */
1681 tree oldargs[4];
1682 bool valueized_anything = false;
1683 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1685 oldargs[i] = gimple_call_arg (def_stmt, i);
1686 if (TREE_CODE (oldargs[i]) == SSA_NAME
1687 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1689 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1690 valueized_anything = true;
1693 if (valueized_anything)
1695 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1696 ref);
1697 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1698 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1699 if (!res)
1700 return NULL;
1704 if (disambiguate_only)
1705 return (void *)-1;
1707 base = ao_ref_base (ref);
1708 offset = ref->offset;
1709 maxsize = ref->max_size;
1711 /* If we cannot constrain the size of the reference we cannot
1712 test if anything kills it. */
1713 if (maxsize == -1)
1714 return (void *)-1;
1716 /* We can't deduce anything useful from clobbers. */
1717 if (gimple_clobber_p (def_stmt))
1718 return (void *)-1;
1720 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1721 from that definition.
1722 1) Memset. */
1723 if (is_gimple_reg_type (vr->type)
1724 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1725 && integer_zerop (gimple_call_arg (def_stmt, 1))
1726 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1727 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1729 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1730 tree base2;
1731 HOST_WIDE_INT offset2, size2, maxsize2;
1732 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1733 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1734 if ((unsigned HOST_WIDE_INT)size2 / 8
1735 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1736 && maxsize2 != -1
1737 && operand_equal_p (base, base2, 0)
1738 && offset2 <= offset
1739 && offset2 + size2 >= offset + maxsize)
1741 tree val = build_zero_cst (vr->type);
1742 return vn_reference_lookup_or_insert_for_pieces
1743 (vuse, vr->set, vr->type, vr->operands, val);
1747 /* 2) Assignment from an empty CONSTRUCTOR. */
1748 else if (is_gimple_reg_type (vr->type)
1749 && gimple_assign_single_p (def_stmt)
1750 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1751 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1753 tree base2;
1754 HOST_WIDE_INT offset2, size2, maxsize2;
1755 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1756 &offset2, &size2, &maxsize2);
1757 if (maxsize2 != -1
1758 && operand_equal_p (base, base2, 0)
1759 && offset2 <= offset
1760 && offset2 + size2 >= offset + maxsize)
1762 tree val = build_zero_cst (vr->type);
1763 return vn_reference_lookup_or_insert_for_pieces
1764 (vuse, vr->set, vr->type, vr->operands, val);
1768 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1769 routines to extract the assigned bits. */
1770 else if (vn_walk_kind == VN_WALKREWRITE
1771 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1772 && ref->size == maxsize
1773 && maxsize % BITS_PER_UNIT == 0
1774 && offset % BITS_PER_UNIT == 0
1775 && is_gimple_reg_type (vr->type)
1776 && gimple_assign_single_p (def_stmt)
1777 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1779 tree base2;
1780 HOST_WIDE_INT offset2, size2, maxsize2;
1781 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1782 &offset2, &size2, &maxsize2);
1783 if (maxsize2 != -1
1784 && maxsize2 == size2
1785 && size2 % BITS_PER_UNIT == 0
1786 && offset2 % BITS_PER_UNIT == 0
1787 && operand_equal_p (base, base2, 0)
1788 && offset2 <= offset
1789 && offset2 + size2 >= offset + maxsize)
1791 /* We support up to 512-bit values (for V8DFmode). */
1792 unsigned char buffer[64];
1793 int len;
1795 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1796 buffer, sizeof (buffer));
1797 if (len > 0)
1799 tree val = native_interpret_expr (vr->type,
1800 buffer
1801 + ((offset - offset2)
1802 / BITS_PER_UNIT),
1803 ref->size / BITS_PER_UNIT);
1804 if (val)
1805 return vn_reference_lookup_or_insert_for_pieces
1806 (vuse, vr->set, vr->type, vr->operands, val);
1811 /* 4) Assignment from an SSA name which definition we may be able
1812 to access pieces from. */
1813 else if (ref->size == maxsize
1814 && is_gimple_reg_type (vr->type)
1815 && gimple_assign_single_p (def_stmt)
1816 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1818 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1819 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1820 if (is_gimple_assign (def_stmt2)
1821 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1822 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1823 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1825 tree base2;
1826 HOST_WIDE_INT offset2, size2, maxsize2, off;
1827 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1828 &offset2, &size2, &maxsize2);
1829 off = offset - offset2;
1830 if (maxsize2 != -1
1831 && maxsize2 == size2
1832 && operand_equal_p (base, base2, 0)
1833 && offset2 <= offset
1834 && offset2 + size2 >= offset + maxsize)
1836 tree val = NULL_TREE;
1837 HOST_WIDE_INT elsz
1838 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1839 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1841 if (off == 0)
1842 val = gimple_assign_rhs1 (def_stmt2);
1843 else if (off == elsz)
1844 val = gimple_assign_rhs2 (def_stmt2);
1846 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1847 && off % elsz == 0)
1849 tree ctor = gimple_assign_rhs1 (def_stmt2);
1850 unsigned i = off / elsz;
1851 if (i < CONSTRUCTOR_NELTS (ctor))
1853 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1854 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1856 if (TREE_CODE (TREE_TYPE (elt->value))
1857 != VECTOR_TYPE)
1858 val = elt->value;
1862 if (val)
1863 return vn_reference_lookup_or_insert_for_pieces
1864 (vuse, vr->set, vr->type, vr->operands, val);
1869 /* 5) For aggregate copies translate the reference through them if
1870 the copy kills ref. */
1871 else if (vn_walk_kind == VN_WALKREWRITE
1872 && gimple_assign_single_p (def_stmt)
1873 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1874 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1875 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1877 tree base2;
1878 HOST_WIDE_INT offset2, size2, maxsize2;
1879 int i, j;
1880 auto_vec<vn_reference_op_s> rhs;
1881 vn_reference_op_t vro;
1882 ao_ref r;
1884 if (!lhs_ref_ok)
1885 return (void *)-1;
1887 /* See if the assignment kills REF. */
1888 base2 = ao_ref_base (&lhs_ref);
1889 offset2 = lhs_ref.offset;
1890 size2 = lhs_ref.size;
1891 maxsize2 = lhs_ref.max_size;
1892 if (maxsize2 == -1
1893 || (base != base2
1894 && (TREE_CODE (base) != MEM_REF
1895 || TREE_CODE (base2) != MEM_REF
1896 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1897 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1898 TREE_OPERAND (base2, 1))))
1899 || offset2 > offset
1900 || offset2 + size2 < offset + maxsize)
1901 return (void *)-1;
1903 /* Find the common base of ref and the lhs. lhs_ops already
1904 contains valueized operands for the lhs. */
1905 i = vr->operands.length () - 1;
1906 j = lhs_ops.length () - 1;
1907 while (j >= 0 && i >= 0
1908 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1910 i--;
1911 j--;
1914 /* ??? The innermost op should always be a MEM_REF and we already
1915 checked that the assignment to the lhs kills vr. Thus for
1916 aggregate copies using char[] types the vn_reference_op_eq
1917 may fail when comparing types for compatibility. But we really
1918 don't care here - further lookups with the rewritten operands
1919 will simply fail if we messed up types too badly. */
1920 HOST_WIDE_INT extra_off = 0;
1921 if (j == 0 && i >= 0
1922 && lhs_ops[0].opcode == MEM_REF
1923 && lhs_ops[0].off != -1)
1925 if (lhs_ops[0].off == vr->operands[i].off)
1926 i--, j--;
1927 else if (vr->operands[i].opcode == MEM_REF
1928 && vr->operands[i].off != -1)
1930 extra_off = vr->operands[i].off - lhs_ops[0].off;
1931 i--, j--;
1935 /* i now points to the first additional op.
1936 ??? LHS may not be completely contained in VR, one or more
1937 VIEW_CONVERT_EXPRs could be in its way. We could at least
1938 try handling outermost VIEW_CONVERT_EXPRs. */
1939 if (j != -1)
1940 return (void *)-1;
1942 /* Now re-write REF to be based on the rhs of the assignment. */
1943 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1945 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1946 if (extra_off != 0)
1948 if (rhs.length () < 2
1949 || rhs[0].opcode != MEM_REF
1950 || rhs[0].off == -1)
1951 return (void *)-1;
1952 rhs[0].off += extra_off;
1953 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1954 build_int_cst (TREE_TYPE (rhs[0].op0),
1955 extra_off));
1958 /* We need to pre-pend vr->operands[0..i] to rhs. */
1959 vec<vn_reference_op_s> old = vr->operands;
1960 if (i + 1 + rhs.length () > vr->operands.length ())
1962 vr->operands.safe_grow (i + 1 + rhs.length ());
1963 if (old == shared_lookup_references)
1964 shared_lookup_references = vr->operands;
1966 else
1967 vr->operands.truncate (i + 1 + rhs.length ());
1968 FOR_EACH_VEC_ELT (rhs, j, vro)
1969 vr->operands[i + 1 + j] = *vro;
1970 vr->operands = valueize_refs (vr->operands);
1971 if (old == shared_lookup_references)
1972 shared_lookup_references = vr->operands;
1973 vr->hashcode = vn_reference_compute_hash (vr);
1975 /* Try folding the new reference to a constant. */
1976 tree val = fully_constant_vn_reference_p (vr);
1977 if (val)
1978 return vn_reference_lookup_or_insert_for_pieces
1979 (vuse, vr->set, vr->type, vr->operands, val);
1981 /* Adjust *ref from the new operands. */
1982 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1983 return (void *)-1;
1984 /* This can happen with bitfields. */
1985 if (ref->size != r.size)
1986 return (void *)-1;
1987 *ref = r;
1989 /* Do not update last seen VUSE after translating. */
1990 last_vuse_ptr = NULL;
1992 /* Keep looking for the adjusted *REF / VR pair. */
1993 return NULL;
1996 /* 6) For memcpy copies translate the reference through them if
1997 the copy kills ref. */
1998 else if (vn_walk_kind == VN_WALKREWRITE
1999 && is_gimple_reg_type (vr->type)
2000 /* ??? Handle BCOPY as well. */
2001 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2002 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2003 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2004 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2005 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2006 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2007 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2008 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2010 tree lhs, rhs;
2011 ao_ref r;
2012 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2013 vn_reference_op_s op;
2014 HOST_WIDE_INT at;
2017 /* Only handle non-variable, addressable refs. */
2018 if (ref->size != maxsize
2019 || offset % BITS_PER_UNIT != 0
2020 || ref->size % BITS_PER_UNIT != 0)
2021 return (void *)-1;
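/* E.g. after  memcpy (&a, &b, sizeof (a));  a later load of a.x is
   translated below into a load of the corresponding bytes of b,
   expressed as a simple MEM_REF off the source pointer.  (Names are
   illustrative.)  */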
2023 /* Extract a pointer base and an offset for the destination. */
2024 lhs = gimple_call_arg (def_stmt, 0);
2025 lhs_offset = 0;
2026 if (TREE_CODE (lhs) == SSA_NAME)
2028 lhs = SSA_VAL (lhs);
2029 if (TREE_CODE (lhs) == SSA_NAME)
2031 gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2032 if (gimple_assign_single_p (def_stmt)
2033 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2034 lhs = gimple_assign_rhs1 (def_stmt);
2037 if (TREE_CODE (lhs) == ADDR_EXPR)
2039 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2040 &lhs_offset);
2041 if (!tem)
2042 return (void *)-1;
2043 if (TREE_CODE (tem) == MEM_REF
2044 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2046 lhs = TREE_OPERAND (tem, 0);
2047 if (TREE_CODE (lhs) == SSA_NAME)
2048 lhs = SSA_VAL (lhs);
2049 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2051 else if (DECL_P (tem))
2052 lhs = build_fold_addr_expr (tem);
2053 else
2054 return (void *)-1;
2056 if (TREE_CODE (lhs) != SSA_NAME
2057 && TREE_CODE (lhs) != ADDR_EXPR)
2058 return (void *)-1;
2060 /* Extract a pointer base and an offset for the source. */
2061 rhs = gimple_call_arg (def_stmt, 1);
2062 rhs_offset = 0;
2063 if (TREE_CODE (rhs) == SSA_NAME)
2064 rhs = SSA_VAL (rhs);
2065 if (TREE_CODE (rhs) == ADDR_EXPR)
2067 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2068 &rhs_offset);
2069 if (!tem)
2070 return (void *)-1;
2071 if (TREE_CODE (tem) == MEM_REF
2072 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2074 rhs = TREE_OPERAND (tem, 0);
2075 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2077 else if (DECL_P (tem))
2078 rhs = build_fold_addr_expr (tem);
2079 else
2080 return (void *)-1;
2082 if (TREE_CODE (rhs) != SSA_NAME
2083 && TREE_CODE (rhs) != ADDR_EXPR)
2084 return (void *)-1;
2086 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2088 /* The bases of the destination and the reference have to agree. */
2089 if ((TREE_CODE (base) != MEM_REF
2090 && !DECL_P (base))
2091 || (TREE_CODE (base) == MEM_REF
2092 && (TREE_OPERAND (base, 0) != lhs
2093 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2094 || (DECL_P (base)
2095 && (TREE_CODE (lhs) != ADDR_EXPR
2096 || TREE_OPERAND (lhs, 0) != base)))
2097 return (void *)-1;
2099 at = offset / BITS_PER_UNIT;
2100 if (TREE_CODE (base) == MEM_REF)
2101 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2102 /* If the access is completely outside of the memcpy destination
2103 area there is no aliasing. */
2104 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2105 || lhs_offset + copy_size <= at)
2106 return NULL;
2107 /* And the access has to be contained within the memcpy destination. */
2108 if (lhs_offset > at
2109 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2110 return (void *)-1;
2112 /* Make room for 2 operands in the new reference. */
2113 if (vr->operands.length () < 2)
2115 vec<vn_reference_op_s> old = vr->operands;
2116 vr->operands.safe_grow_cleared (2);
2117 if (old == shared_lookup_references
2118 && vr->operands != old)
2119 shared_lookup_references = vr->operands;
2121 else
2122 vr->operands.truncate (2);
2124 /* The looked-through reference is a simple MEM_REF. */
2125 memset (&op, 0, sizeof (op));
2126 op.type = vr->type;
2127 op.opcode = MEM_REF;
2128 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2129 op.off = at - lhs_offset + rhs_offset;
2130 vr->operands[0] = op;
2131 op.type = TREE_TYPE (rhs);
2132 op.opcode = TREE_CODE (rhs);
2133 op.op0 = rhs;
2134 op.off = -1;
2135 vr->operands[1] = op;
2136 vr->hashcode = vn_reference_compute_hash (vr);
2138 /* Adjust *ref from the new operands. */
2139 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2140 return (void *)-1;
2141 /* This can happen with bitfields. */
2142 if (ref->size != r.size)
2143 return (void *)-1;
2144 *ref = r;
2146 /* Do not update last seen VUSE after translating. */
2147 last_vuse_ptr = NULL;
2149 /* Keep looking for the adjusted *REF / VR pair. */
2150 return NULL;
2153 /* Bail out and stop walking. */
2154 return (void *)-1;
2157 /* Lookup a reference operation by its parts in the current hash table.
2158 Returns the resulting value number if it exists in the hash table,
2159 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2160 vn_reference_t stored in the hashtable if something is found. */
2162 tree
2163 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2164 vec<vn_reference_op_s> operands,
2165 vn_reference_t *vnresult, vn_lookup_kind kind)
2167 struct vn_reference_s vr1;
2168 vn_reference_t tmp;
2169 tree cst;
2171 if (!vnresult)
2172 vnresult = &tmp;
2173 *vnresult = NULL;
2175 vr1.vuse = vuse_ssa_val (vuse);
2176 shared_lookup_references.truncate (0);
2177 shared_lookup_references.safe_grow (operands.length ());
2178 memcpy (shared_lookup_references.address (),
2179 operands.address (),
2180 sizeof (vn_reference_op_s)
2181 * operands.length ());
2182 vr1.operands = operands = shared_lookup_references
2183 = valueize_refs (shared_lookup_references);
2184 vr1.type = type;
2185 vr1.set = set;
2186 vr1.hashcode = vn_reference_compute_hash (&vr1);
2187 if ((cst = fully_constant_vn_reference_p (&vr1)))
2188 return cst;
2190 vn_reference_lookup_1 (&vr1, vnresult);
2191 if (!*vnresult
2192 && kind != VN_NOWALK
2193 && vr1.vuse)
2195 ao_ref r;
2196 vn_walk_kind = kind;
2197 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2198 *vnresult =
2199 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2200 vn_reference_lookup_2,
2201 vn_reference_lookup_3,
2202 vuse_ssa_val, &vr1);
2203 gcc_checking_assert (vr1.operands == shared_lookup_references);
2206 if (*vnresult)
2207 return (*vnresult)->result;
2209 return NULL_TREE;
2212 /* Lookup OP in the current hash table, and return the resulting value
2213 number if it exists in the hash table. Return NULL_TREE if it does
2214 not exist in the hash table or if the result field of the structure
2215 was NULL. VNRESULT will be filled in with the vn_reference_t
2216 stored in the hashtable if one exists. */
2218 tree
2219 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2220 vn_reference_t *vnresult)
2222 vec<vn_reference_op_s> operands;
2223 struct vn_reference_s vr1;
2224 tree cst;
2225 bool valuezied_anything;
2227 if (vnresult)
2228 *vnresult = NULL;
2230 vr1.vuse = vuse_ssa_val (vuse);
2231 vr1.operands = operands
2232 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2233 vr1.type = TREE_TYPE (op);
2234 vr1.set = get_alias_set (op);
2235 vr1.hashcode = vn_reference_compute_hash (&vr1);
2236 if ((cst = fully_constant_vn_reference_p (&vr1)))
2237 return cst;
2239 if (kind != VN_NOWALK
2240 && vr1.vuse)
2242 vn_reference_t wvnresult;
2243 ao_ref r;
2244 /* Make sure to use a valueized reference if we valueized anything.
2245 Otherwise preserve the full reference for advanced TBAA. */
2246 if (!valuezied_anything
2247 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2248 vr1.operands))
2249 ao_ref_init (&r, op);
2250 vn_walk_kind = kind;
2251 wvnresult =
2252 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2253 vn_reference_lookup_2,
2254 vn_reference_lookup_3,
2255 vuse_ssa_val, &vr1);
2256 gcc_checking_assert (vr1.operands == shared_lookup_references);
2257 if (wvnresult)
2259 if (vnresult)
2260 *vnresult = wvnresult;
2261 return wvnresult->result;
2264 return NULL_TREE;
2267 return vn_reference_lookup_1 (&vr1, vnresult);
2270 /* Lookup CALL in the current hash table and return the entry in
2271 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2273 void
2274 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2275 vn_reference_t vr)
2277 if (vnresult)
2278 *vnresult = NULL;
2280 tree vuse = gimple_vuse (call);
2282 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2283 vr->operands = valueize_shared_reference_ops_from_call (call);
2284 vr->type = gimple_expr_type (call);
2285 vr->set = 0;
2286 vr->hashcode = vn_reference_compute_hash (vr);
2287 vn_reference_lookup_1 (vr, vnresult);
2290 /* Insert OP into the current hash table with a value number of
2291 RESULT, and return the resulting reference structure we created. */
2293 static vn_reference_t
2294 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2296 vn_reference_s **slot;
2297 vn_reference_t vr1;
2298 bool tem;
2300 vr1 = current_info->references_pool->allocate ();
2301 if (TREE_CODE (result) == SSA_NAME)
2302 vr1->value_id = VN_INFO (result)->value_id;
2303 else
2304 vr1->value_id = get_or_alloc_constant_value_id (result);
2305 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2306 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2307 vr1->type = TREE_TYPE (op);
2308 vr1->set = get_alias_set (op);
2309 vr1->hashcode = vn_reference_compute_hash (vr1);
2310 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2311 vr1->result_vdef = vdef;
2313 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2314 INSERT);
2316 /* Because we lookup stores using vuses, and value number failures
2317 using the vdefs (see visit_reference_op_store for how and why),
2318 it's possible that on failure we may try to insert an already
2319 inserted store. This is not wrong; there is no ssa name for a
2320 store that we could use as a differentiator anyway. Thus, unlike
2321 the other lookup functions, you cannot gcc_assert (!*slot)
2322 here. */
2324 /* But free the old slot in case of a collision. */
2325 if (*slot)
2326 free_reference (*slot);
2328 *slot = vr1;
2329 return vr1;
2332 /* Insert a reference by its pieces into the current hash table with
2333 a value number of RESULT. Return the resulting reference
2334 structure we created. */
2336 vn_reference_t
2337 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2338 vec<vn_reference_op_s> operands,
2339 tree result, unsigned int value_id)
2342 vn_reference_s **slot;
2343 vn_reference_t vr1;
2345 vr1 = current_info->references_pool->allocate ();
2346 vr1->value_id = value_id;
2347 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2348 vr1->operands = valueize_refs (operands);
2349 vr1->type = type;
2350 vr1->set = set;
2351 vr1->hashcode = vn_reference_compute_hash (vr1);
2352 if (result && TREE_CODE (result) == SSA_NAME)
2353 result = SSA_VAL (result);
2354 vr1->result = result;
2356 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2357 INSERT);
2359 /* At this point we should have all the things inserted that we have
2360 seen before, and we should never try inserting something that
2361 already exists. */
2362 gcc_assert (!*slot);
2363 if (*slot)
2364 free_reference (*slot);
2366 *slot = vr1;
2367 return vr1;
2370 /* Compute and return the hash value for nary operation VNO1. */
2372 static hashval_t
2373 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2375 inchash::hash hstate;
2376 unsigned i;
2378 for (i = 0; i < vno1->length; ++i)
2379 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2380 vno1->op[i] = SSA_VAL (vno1->op[i]);
2382 if (vno1->length == 2
2383 && commutative_tree_code (vno1->opcode)
2384 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2385 std::swap (vno1->op[0], vno1->op[1]);
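/* Canonicalizing commutative operands here means e.g. a_1 + b_2 and
   b_2 + a_1 hash the same and later compare equal in vn_nary_op_eq.
   (Names are illustrative.)  */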
2387 hstate.add_int (vno1->opcode);
2388 for (i = 0; i < vno1->length; ++i)
2389 inchash::add_expr (vno1->op[i], hstate);
2391 return hstate.end ();
2394 /* Compare nary operations VNO1 and VNO2 and return true if they are
2395 equivalent. */
2397 bool
2398 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2400 unsigned i;
2402 if (vno1->hashcode != vno2->hashcode)
2403 return false;
2405 if (vno1->length != vno2->length)
2406 return false;
2408 if (vno1->opcode != vno2->opcode
2409 || !types_compatible_p (vno1->type, vno2->type))
2410 return false;
2412 for (i = 0; i < vno1->length; ++i)
2413 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2414 return false;
2416 return true;
2419 /* Initialize VNO from the pieces provided. */
2421 static void
2422 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2423 enum tree_code code, tree type, tree *ops)
2425 vno->opcode = code;
2426 vno->length = length;
2427 vno->type = type;
2428 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2431 /* Initialize VNO from OP. */
2433 static void
2434 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2436 unsigned i;
2438 vno->opcode = TREE_CODE (op);
2439 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2440 vno->type = TREE_TYPE (op);
2441 for (i = 0; i < vno->length; ++i)
2442 vno->op[i] = TREE_OPERAND (op, i);
2445 /* Return the number of operands for a vn_nary ops structure from STMT. */
2447 static unsigned int
2448 vn_nary_length_from_stmt (gimple stmt)
2450 switch (gimple_assign_rhs_code (stmt))
2452 case REALPART_EXPR:
2453 case IMAGPART_EXPR:
2454 case VIEW_CONVERT_EXPR:
2455 return 1;
2457 case BIT_FIELD_REF:
2458 return 3;
2460 case CONSTRUCTOR:
2461 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2463 default:
2464 return gimple_num_ops (stmt) - 1;
2468 /* Initialize VNO from STMT. */
2470 static void
2471 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2473 unsigned i;
2475 vno->opcode = gimple_assign_rhs_code (stmt);
2476 vno->type = gimple_expr_type (stmt);
2477 switch (vno->opcode)
2479 case REALPART_EXPR:
2480 case IMAGPART_EXPR:
2481 case VIEW_CONVERT_EXPR:
2482 vno->length = 1;
2483 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2484 break;
2486 case BIT_FIELD_REF:
2487 vno->length = 3;
2488 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2489 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2490 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2491 break;
2493 case CONSTRUCTOR:
2494 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2495 for (i = 0; i < vno->length; ++i)
2496 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2497 break;
2499 default:
2500 gcc_checking_assert (!gimple_assign_single_p (stmt));
2501 vno->length = gimple_num_ops (stmt) - 1;
2502 for (i = 0; i < vno->length; ++i)
2503 vno->op[i] = gimple_op (stmt, i + 1);
2507 /* Compute the hashcode for VNO and look for it in the hash table;
2508 return the resulting value number if it exists in the hash table.
2509 Return NULL_TREE if it does not exist in the hash table or if the
2510 result field of the operation is NULL. VNRESULT will contain the
2511 vn_nary_op_t from the hashtable if it exists. */
2513 static tree
2514 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2516 vn_nary_op_s **slot;
2518 if (vnresult)
2519 *vnresult = NULL;
2521 vno->hashcode = vn_nary_op_compute_hash (vno);
2522 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2523 NO_INSERT);
2524 if (!slot && current_info == optimistic_info)
2525 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2526 NO_INSERT);
2527 if (!slot)
2528 return NULL_TREE;
2529 if (vnresult)
2530 *vnresult = *slot;
2531 return (*slot)->result;
2534 /* Lookup a n-ary operation by its pieces and return the resulting value
2535 number if it exists in the hash table. Return NULL_TREE if it does
2536 not exist in the hash table or if the result field of the operation
2537 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2538 if it exists. */
2540 tree
2541 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2542 tree type, tree *ops, vn_nary_op_t *vnresult)
2544 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2545 sizeof_vn_nary_op (length));
2546 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2547 return vn_nary_op_lookup_1 (vno1, vnresult);
2550 /* Lookup OP in the current hash table, and return the resulting value
2551 number if it exists in the hash table. Return NULL_TREE if it does
2552 not exist in the hash table or if the result field of the operation
2553 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2554 if it exists. */
2556 tree
2557 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2559 vn_nary_op_t vno1
2560 = XALLOCAVAR (struct vn_nary_op_s,
2561 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2562 init_vn_nary_op_from_op (vno1, op);
2563 return vn_nary_op_lookup_1 (vno1, vnresult);
2566 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2567 value number if it exists in the hash table. Return NULL_TREE if
2568 it does not exist in the hash table. VNRESULT will contain the
2569 vn_nary_op_t from the hashtable if it exists. */
2571 tree
2572 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2574 vn_nary_op_t vno1
2575 = XALLOCAVAR (struct vn_nary_op_s,
2576 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2577 init_vn_nary_op_from_stmt (vno1, stmt);
2578 return vn_nary_op_lookup_1 (vno1, vnresult);
2581 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2583 static vn_nary_op_t
2584 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2586 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2589 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2590 obstack. */
2592 static vn_nary_op_t
2593 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2595 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2596 &current_info->nary_obstack);
2598 vno1->value_id = value_id;
2599 vno1->length = length;
2600 vno1->result = result;
2602 return vno1;
2605 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2606 VNO->HASHCODE first. */
2608 static vn_nary_op_t
2609 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2610 bool compute_hash)
2612 vn_nary_op_s **slot;
2614 if (compute_hash)
2615 vno->hashcode = vn_nary_op_compute_hash (vno);
2617 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2618 gcc_assert (!*slot);
2620 *slot = vno;
2621 return vno;
2624 /* Insert an n-ary operation into the current hash table using its
2625 pieces. Return the vn_nary_op_t structure we created and put in
2626 the hashtable. */
2628 vn_nary_op_t
2629 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2630 tree type, tree *ops,
2631 tree result, unsigned int value_id)
2633 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2634 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2635 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2638 /* Insert OP into the current hash table with a value number of
2639 RESULT. Return the vn_nary_op_t structure we created and put in
2640 the hashtable. */
2642 vn_nary_op_t
2643 vn_nary_op_insert (tree op, tree result)
2645 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2646 vn_nary_op_t vno1;
2648 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2649 init_vn_nary_op_from_op (vno1, op);
2650 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2653 /* Insert the rhs of STMT into the current hash table with a value number of
2654 RESULT. */
2656 vn_nary_op_t
2657 vn_nary_op_insert_stmt (gimple stmt, tree result)
2659 vn_nary_op_t vno1
2660 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2661 result, VN_INFO (result)->value_id);
2662 init_vn_nary_op_from_stmt (vno1, stmt);
2663 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2666 /* Compute a hashcode for PHI operation VP1 and return it. */
2668 static inline hashval_t
2669 vn_phi_compute_hash (vn_phi_t vp1)
2671 inchash::hash hstate (vp1->block->index);
2672 int i;
2673 tree phi1op;
2674 tree type;
2676 /* If all PHI arguments are constants we need to distinguish
2677 the PHI node via its type. */
2678 type = vp1->type;
2679 hstate.merge_hash (vn_hash_type (type));
2681 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2683 if (phi1op == VN_TOP)
2684 continue;
2685 inchash::add_expr (phi1op, hstate);
2688 return hstate.end ();
2691 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2693 static int
2694 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2696 if (vp1->hashcode != vp2->hashcode)
2697 return false;
2699 if (vp1->block == vp2->block)
2701 int i;
2702 tree phi1op;
2704 /* If the PHI nodes do not have compatible types
2705 they are not the same. */
2706 if (!types_compatible_p (vp1->type, vp2->type))
2707 return false;
2709 /* Any phi in the same block will have its arguments in the
2710 same edge order, because of how we store phi nodes. */
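/* So e.g. two PHIs in the same block,  x_1 = PHI <a_2(3), b_4(5)>  and
   y_6 = PHI <a_2(3), b_4(5)>,  compare equal here and end up with the
   same value number.  (Names are illustrative.)  */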
2711 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2713 tree phi2op = vp2->phiargs[i];
2714 if (phi1op == VN_TOP || phi2op == VN_TOP)
2715 continue;
2716 if (!expressions_equal_p (phi1op, phi2op))
2717 return false;
2719 return true;
2721 return false;
2724 static vec<tree> shared_lookup_phiargs;
2726 /* Lookup PHI in the current hash table, and return the resulting
2727 value number if it exists in the hash table. Return NULL_TREE if
2728 it does not exist in the hash table. */
2730 static tree
2731 vn_phi_lookup (gimple phi)
2733 vn_phi_s **slot;
2734 struct vn_phi_s vp1;
2735 unsigned i;
2737 shared_lookup_phiargs.truncate (0);
2739 /* Canonicalize the SSA_NAME's to their value number. */
2740 for (i = 0; i < gimple_phi_num_args (phi); i++)
2742 tree def = PHI_ARG_DEF (phi, i);
2743 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2744 shared_lookup_phiargs.safe_push (def);
2746 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2747 vp1.phiargs = shared_lookup_phiargs;
2748 vp1.block = gimple_bb (phi);
2749 vp1.hashcode = vn_phi_compute_hash (&vp1);
2750 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2751 NO_INSERT);
2752 if (!slot && current_info == optimistic_info)
2753 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2754 NO_INSERT);
2755 if (!slot)
2756 return NULL_TREE;
2757 return (*slot)->result;
2760 /* Insert PHI into the current hash table with a value number of
2761 RESULT. */
2763 static vn_phi_t
2764 vn_phi_insert (gimple phi, tree result)
2766 vn_phi_s **slot;
2767 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2768 unsigned i;
2769 vec<tree> args = vNULL;
2771 /* Canonicalize the SSA_NAME's to their value number. */
2772 for (i = 0; i < gimple_phi_num_args (phi); i++)
2774 tree def = PHI_ARG_DEF (phi, i);
2775 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2776 args.safe_push (def);
2778 vp1->value_id = VN_INFO (result)->value_id;
2779 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2780 vp1->phiargs = args;
2781 vp1->block = gimple_bb (phi);
2782 vp1->result = result;
2783 vp1->hashcode = vn_phi_compute_hash (vp1);
2785 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2787 /* Because we iterate over phi operations more than once, it's
2788 possible the slot might already exist here, hence no assert.  */
2789 *slot = vp1;
2790 return vp1;
2794 /* Print set of components in strongly connected component SCC to OUT. */
2796 static void
2797 print_scc (FILE *out, vec<tree> scc)
2799 tree var;
2800 unsigned int i;
2802 fprintf (out, "SCC consists of:");
2803 FOR_EACH_VEC_ELT (scc, i, var)
2805 fprintf (out, " ");
2806 print_generic_expr (out, var, 0);
2808 fprintf (out, "\n");
2811 /* Set the value number of FROM to TO, return true if it has changed
2812 as a result. */
2814 static inline bool
2815 set_ssa_val_to (tree from, tree to)
2817 tree currval = SSA_VAL (from);
2818 HOST_WIDE_INT toff, coff;
2820 /* The only things we allow as value numbers are ssa_names
2821 and invariants. So assert that here. We don't allow VN_TOP
2822 as visiting a stmt should produce a value-number other than
2823 that.
2824 ??? Still VN_TOP can happen for unreachable code, so force
2825 it to varying in that case. Not all code is prepared to
2826 get VN_TOP on valueization. */
2827 if (to == VN_TOP)
2829 if (dump_file && (dump_flags & TDF_DETAILS))
2830 fprintf (dump_file, "Forcing value number to varying on "
2831 "receiving VN_TOP\n");
2832 to = from;
2835 gcc_assert (to != NULL_TREE
2836 && ((TREE_CODE (to) == SSA_NAME
2837 && (to == from || SSA_VAL (to) == to))
2838 || is_gimple_min_invariant (to)));
2840 if (from != to)
2842 if (currval == from)
2844 if (dump_file && (dump_flags & TDF_DETAILS))
2846 fprintf (dump_file, "Not changing value number of ");
2847 print_generic_expr (dump_file, from, 0);
2848 fprintf (dump_file, " from VARYING to ");
2849 print_generic_expr (dump_file, to, 0);
2850 fprintf (dump_file, "\n");
2852 return false;
2854 else if (TREE_CODE (to) == SSA_NAME
2855 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2856 to = from;
2859 if (dump_file && (dump_flags & TDF_DETAILS))
2861 fprintf (dump_file, "Setting value number of ");
2862 print_generic_expr (dump_file, from, 0);
2863 fprintf (dump_file, " to ");
2864 print_generic_expr (dump_file, to, 0);
2867 if (currval != to
2868 && !operand_equal_p (currval, to, 0)
2869 /* ??? For addresses involving volatile objects or types operand_equal_p
2870 does not reliably detect ADDR_EXPRs as equal. We know we are only
2871 getting invariant gimple addresses here, so can use
2872 get_addr_base_and_unit_offset to do this comparison. */
2873 && !(TREE_CODE (currval) == ADDR_EXPR
2874 && TREE_CODE (to) == ADDR_EXPR
2875 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2876 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2877 && coff == toff))
2879 VN_INFO (from)->valnum = to;
2880 if (dump_file && (dump_flags & TDF_DETAILS))
2881 fprintf (dump_file, " (changed)\n");
2882 return true;
2884 if (dump_file && (dump_flags & TDF_DETAILS))
2885 fprintf (dump_file, "\n");
2886 return false;
2889 /* Mark as processed all the definitions in the defining stmt of USE, or
2890 the USE itself. */
2892 static void
2893 mark_use_processed (tree use)
2895 ssa_op_iter iter;
2896 def_operand_p defp;
2897 gimple stmt = SSA_NAME_DEF_STMT (use);
2899 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2901 VN_INFO (use)->use_processed = true;
2902 return;
2905 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2907 tree def = DEF_FROM_PTR (defp);
2909 VN_INFO (def)->use_processed = true;
2913 /* Value number all definitions in STMT to themselves.
2914 Return true if a value number changed. */
2916 static bool
2917 defs_to_varying (gimple stmt)
2919 bool changed = false;
2920 ssa_op_iter iter;
2921 def_operand_p defp;
2923 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2925 tree def = DEF_FROM_PTR (defp);
2926 changed |= set_ssa_val_to (def, def);
2928 return changed;
2931 static bool expr_has_constants (tree expr);
2933 /* Visit a copy between LHS and RHS, return true if the value number
2934 changed. */
2936 static bool
2937 visit_copy (tree lhs, tree rhs)
2939 /* The copy may have a more interesting constant filled expression
2940 (we don't, since we know our RHS is just an SSA name). */
2941 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2942 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2944 /* And finally valueize. */
2945 rhs = SSA_VAL (rhs);
2947 return set_ssa_val_to (lhs, rhs);
2950 /* Visit a nary operator RHS, value number it, and return true if the
2951 value number of LHS has changed as a result. */
2953 static bool
2954 visit_nary_op (tree lhs, gimple stmt)
2956 bool changed = false;
2957 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2959 if (result)
2960 changed = set_ssa_val_to (lhs, result);
2961 else
2963 changed = set_ssa_val_to (lhs, lhs);
2964 vn_nary_op_insert_stmt (stmt, lhs);
2967 return changed;
2970 /* Visit a call STMT storing into LHS. Return true if the value number
2971 of the LHS has changed as a result. */
2973 static bool
2974 visit_reference_op_call (tree lhs, gcall *stmt)
2976 bool changed = false;
2977 struct vn_reference_s vr1;
2978 vn_reference_t vnresult = NULL;
2979 tree vdef = gimple_vdef (stmt);
2981 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2982 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2983 lhs = NULL_TREE;
2985 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2986 if (vnresult)
2988 if (vnresult->result_vdef && vdef)
2989 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2991 if (!vnresult->result && lhs)
2992 vnresult->result = lhs;
2994 if (vnresult->result && lhs)
2996 changed |= set_ssa_val_to (lhs, vnresult->result);
2998 if (VN_INFO (vnresult->result)->has_constants)
2999 VN_INFO (lhs)->has_constants = true;
3002 else
3004 vn_reference_t vr2;
3005 vn_reference_s **slot;
3006 if (vdef)
3007 changed |= set_ssa_val_to (vdef, vdef);
3008 if (lhs)
3009 changed |= set_ssa_val_to (lhs, lhs);
3010 vr2 = current_info->references_pool->allocate ();
3011 vr2->vuse = vr1.vuse;
3012 /* As we are not walking the virtual operand chain we know the
3013 shared_lookup_references are still original so we can re-use
3014 them here. */
3015 vr2->operands = vr1.operands.copy ();
3016 vr2->type = vr1.type;
3017 vr2->set = vr1.set;
3018 vr2->hashcode = vr1.hashcode;
3019 vr2->result = lhs;
3020 vr2->result_vdef = vdef;
3021 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3022 INSERT);
3023 gcc_assert (!*slot);
3024 *slot = vr2;
3027 return changed;
3030 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3031 and return true if the value number of the LHS has changed as a result. */
3033 static bool
3034 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3036 bool changed = false;
3037 tree last_vuse;
3038 tree result;
3040 last_vuse = gimple_vuse (stmt);
3041 last_vuse_ptr = &last_vuse;
3042 result = vn_reference_lookup (op, gimple_vuse (stmt),
3043 default_vn_walk_kind, NULL);
3044 last_vuse_ptr = NULL;
3046 /* We handle type-punning through unions by value-numbering based
3047 on offset and size of the access. Be prepared to handle a
3048 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
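/* E.g. with  union { int i; float f; } u;  a load of u.f right after a
   store to u.i hits the stored value; the VIEW_CONVERT_EXPR built below
   reconciles the mismatched types.  (The union is illustrative.)  */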
3049 if (result
3050 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3052 /* We will be setting the value number of lhs to the value number
3053 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3054 So first simplify and lookup this expression to see if it
3055 is already available. */
3056 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3057 if ((CONVERT_EXPR_P (val)
3058 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3059 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3061 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3062 if ((CONVERT_EXPR_P (tem)
3063 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3064 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3065 TREE_TYPE (val), tem)))
3066 val = tem;
3068 result = val;
3069 if (!is_gimple_min_invariant (val)
3070 && TREE_CODE (val) != SSA_NAME)
3071 result = vn_nary_op_lookup (val, NULL);
3072 /* If the expression is not yet available, value-number lhs to
3073 a new SSA_NAME we create. */
3074 if (!result)
3076 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3077 "vntemp");
3078 /* Initialize value-number information properly. */
3079 VN_INFO_GET (result)->valnum = result;
3080 VN_INFO (result)->value_id = get_next_value_id ();
3081 VN_INFO (result)->expr = val;
3082 VN_INFO (result)->has_constants = expr_has_constants (val);
3083 VN_INFO (result)->needs_insertion = true;
3084 /* As all "inserted" statements are singleton SCCs, insert
3085 to the valid table. This is strictly needed to
3086 avoid re-generating new value SSA_NAMEs for the same
3087 expression during SCC iteration over and over (the
3088 optimistic table gets cleared after each iteration).
3089 We do not need to insert into the optimistic table, as
3090 lookups there will fall back to the valid table. */
3091 if (current_info == optimistic_info)
3093 current_info = valid_info;
3094 vn_nary_op_insert (val, result);
3095 current_info = optimistic_info;
3097 else
3098 vn_nary_op_insert (val, result);
3099 if (dump_file && (dump_flags & TDF_DETAILS))
3101 fprintf (dump_file, "Inserting name ");
3102 print_generic_expr (dump_file, result, 0);
3103 fprintf (dump_file, " for expression ");
3104 print_generic_expr (dump_file, val, 0);
3105 fprintf (dump_file, "\n");
3110 if (result)
3112 changed = set_ssa_val_to (lhs, result);
3113 if (TREE_CODE (result) == SSA_NAME
3114 && VN_INFO (result)->has_constants)
3116 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3117 VN_INFO (lhs)->has_constants = true;
3120 else
3122 changed = set_ssa_val_to (lhs, lhs);
3123 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3126 return changed;
3130 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3131 and return true if the value number of the LHS has changed as a result. */
3133 static bool
3134 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3136 bool changed = false;
3137 vn_reference_t vnresult = NULL;
3138 tree result, assign;
3139 bool resultsame = false;
3140 tree vuse = gimple_vuse (stmt);
3141 tree vdef = gimple_vdef (stmt);
3143 if (TREE_CODE (op) == SSA_NAME)
3144 op = SSA_VAL (op);
3146 /* First we want to lookup using the *vuses* from the store and see
3147 if there the last store to this location with the same address
3148 had the same value.
3150 The vuses represent the memory state before the store. If the
3151 memory state, address, and value of the store is the same as the
3152 last store to this location, then this store will produce the
3153 same memory state as that store.
3155 In this case the vdef versions for this store are value numbered to those
3156 vuse versions, since they represent the same memory state after
3157 this store.
3159 Otherwise, the vdefs for the store are used when inserting into
3160 the table, since the store generates a new memory state. */
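/* E.g. a second  *p_1 = x_2;  seeing the same memory state as the first
   one is recognized below as redundant and its vdef is value-numbered to
   its vuse instead of generating a new memory state.  (Names are
   illustrative.)  */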
3162 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3164 if (result)
3166 if (TREE_CODE (result) == SSA_NAME)
3167 result = SSA_VAL (result);
3168 resultsame = expressions_equal_p (result, op);
3171 if ((!result || !resultsame)
3172 /* Only perform the following when being called from PRE
3173 which embeds tail merging. */
3174 && default_vn_walk_kind == VN_WALK)
3176 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3177 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3178 if (vnresult)
3180 VN_INFO (vdef)->use_processed = true;
3181 return set_ssa_val_to (vdef, vnresult->result_vdef);
3185 if (!result || !resultsame)
3187 if (dump_file && (dump_flags & TDF_DETAILS))
3189 fprintf (dump_file, "No store match\n");
3190 fprintf (dump_file, "Value numbering store ");
3191 print_generic_expr (dump_file, lhs, 0);
3192 fprintf (dump_file, " to ");
3193 print_generic_expr (dump_file, op, 0);
3194 fprintf (dump_file, "\n");
3196 /* Have to set value numbers before insert, since insert is
3197 going to valueize the references in-place. */
3198 if (vdef)
3200 changed |= set_ssa_val_to (vdef, vdef);
3203 /* Do not insert structure copies into the tables. */
3204 if (is_gimple_min_invariant (op)
3205 || is_gimple_reg (op))
3206 vn_reference_insert (lhs, op, vdef, NULL);
3208 /* Only perform the following when being called from PRE
3209 which embeds tail merging. */
3210 if (default_vn_walk_kind == VN_WALK)
3212 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3213 vn_reference_insert (assign, lhs, vuse, vdef);
3216 else
3218 /* We had a match, so value number the vdef to have the value
3219 number of the vuse it came from. */
3221 if (dump_file && (dump_flags & TDF_DETAILS))
3222 fprintf (dump_file, "Store matched earlier value, "
3223 "value numbering store vdefs to matching vuses.\n");
3225 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3228 return changed;
3231 /* Visit and value number PHI, return true if the value number
3232 changed. */
3234 static bool
3235 visit_phi (gimple phi)
3237 bool changed = false;
3238 tree result;
3239 tree sameval = VN_TOP;
3240 bool allsame = true;
3242 /* TODO: We could check for this in init_sccvn, and replace this
3243 with a gcc_assert. */
3244 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3245 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3247 /* See if all non-TOP arguments have the same value. TOP is
3248 equivalent to everything, so we can ignore it. */
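/* E.g.  x_3 = PHI <a_1(2), a_1(4)>  value-numbers to a_1; arguments whose
   current value is still VN_TOP are ignored for this check.  (Names are
   illustrative.)  */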
3249 edge_iterator ei;
3250 edge e;
3251 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3252 if (e->flags & EDGE_EXECUTABLE)
3254 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3256 if (TREE_CODE (def) == SSA_NAME)
3257 def = SSA_VAL (def);
3258 if (def == VN_TOP)
3259 continue;
3260 if (sameval == VN_TOP)
3262 sameval = def;
3264 else
3266 if (!expressions_equal_p (def, sameval))
3268 allsame = false;
3269 break;
3274 /* If all value numbered to the same value, the phi node has that
3275 value. */
3276 if (allsame)
3277 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3279 /* Otherwise, see if it is equivalent to a phi node in this block. */
3280 result = vn_phi_lookup (phi);
3281 if (result)
3282 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3283 else
3285 vn_phi_insert (phi, PHI_RESULT (phi));
3286 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3287 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3288 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3291 return changed;
3294 /* Return true if EXPR contains constants. */
3296 static bool
3297 expr_has_constants (tree expr)
3299 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3301 case tcc_unary:
3302 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3304 case tcc_binary:
3305 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3306 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3307 /* Constants inside reference ops are rarely interesting, but
3308 it can take a lot of looking to find them. */
3309 case tcc_reference:
3310 case tcc_declaration:
3311 return false;
3312 default:
3313 return is_gimple_min_invariant (expr);
3315 return false;
3318 /* Return true if STMT contains constants. */
3320 static bool
3321 stmt_has_constants (gimple stmt)
3323 tree tem;
3325 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3326 return false;
3328 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3330 case GIMPLE_TERNARY_RHS:
3331 tem = gimple_assign_rhs3 (stmt);
3332 if (TREE_CODE (tem) == SSA_NAME)
3333 tem = SSA_VAL (tem);
3334 if (is_gimple_min_invariant (tem))
3335 return true;
3336 /* Fallthru. */
3338 case GIMPLE_BINARY_RHS:
3339 tem = gimple_assign_rhs2 (stmt);
3340 if (TREE_CODE (tem) == SSA_NAME)
3341 tem = SSA_VAL (tem);
3342 if (is_gimple_min_invariant (tem))
3343 return true;
3344 /* Fallthru. */
3346 case GIMPLE_SINGLE_RHS:
3347 /* Constants inside reference ops are rarely interesting, but
3348 it can take a lot of looking to find them. */
3349 case GIMPLE_UNARY_RHS:
3350 tem = gimple_assign_rhs1 (stmt);
3351 if (TREE_CODE (tem) == SSA_NAME)
3352 tem = SSA_VAL (tem);
3353 return is_gimple_min_invariant (tem);
3355 default:
3356 gcc_unreachable ();
3358 return false;
3361 /* Simplify the binary expression RHS, and return the result if
3362 simplified. */
3364 static tree
3365 simplify_binary_expression (gimple stmt)
3367 tree result = NULL_TREE;
3368 tree op0 = gimple_assign_rhs1 (stmt);
3369 tree op1 = gimple_assign_rhs2 (stmt);
3370 enum tree_code code = gimple_assign_rhs_code (stmt);
3372 /* This will not catch every single case we could combine, but will
3373 catch those with constants. The goal here is to simultaneously
3374 combine constants between expressions, but avoid infinite
3375 expansion of expressions during simplification. */
3376 op0 = vn_valueize (op0);
3377 if (TREE_CODE (op0) == SSA_NAME
3378 && (VN_INFO (op0)->has_constants
3379 || TREE_CODE_CLASS (code) == tcc_comparison
3380 || code == COMPLEX_EXPR))
3381 op0 = vn_get_expr_for (op0);
3383 op1 = vn_valueize (op1);
3384 if (TREE_CODE (op1) == SSA_NAME
3385 && (VN_INFO (op1)->has_constants
3386 || code == COMPLEX_EXPR))
3387 op1 = vn_get_expr_for (op1);
3389 /* Pointer plus constant can be represented as invariant address.
3390 Do so to allow further propagation, see also tree forwprop. */
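/* E.g.  &a p+ 4  becomes a single invariant address expression for the
   adjusted location, which later passes can propagate like any other
   constant.  (The operands are illustrative.)  */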
3391 if (code == POINTER_PLUS_EXPR
3392 && tree_fits_uhwi_p (op1)
3393 && TREE_CODE (op0) == ADDR_EXPR
3394 && is_gimple_min_invariant (op0))
3395 return build_invariant_address (TREE_TYPE (op0),
3396 TREE_OPERAND (op0, 0),
3397 tree_to_uhwi (op1));
3399 /* Avoid folding if nothing changed. */
3400 if (op0 == gimple_assign_rhs1 (stmt)
3401 && op1 == gimple_assign_rhs2 (stmt))
3402 return NULL_TREE;
3404 fold_defer_overflow_warnings ();
3406 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3407 if (result)
3408 STRIP_USELESS_TYPE_CONVERSION (result);
3410 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3411 stmt, 0);
3413 /* Make sure result is not a complex expression consisting
3414 of operators of operators (i.e. (a + b) + (a + c)).
3415 Otherwise, we will end up with unbounded expressions if
3416 fold does anything at all. */
3417 if (result && valid_gimple_rhs_p (result))
3418 return result;
3420 return NULL_TREE;
3423 /* Simplify the unary expression RHS, and return the result if
3424 simplified. */
3426 static tree
3427 simplify_unary_expression (gassign *stmt)
3429 tree result = NULL_TREE;
3430 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3431 enum tree_code code = gimple_assign_rhs_code (stmt);
3433 /* We handle some tcc_reference codes here that are all
3434 GIMPLE_ASSIGN_SINGLE codes. */
3435 if (code == REALPART_EXPR
3436 || code == IMAGPART_EXPR
3437 || code == VIEW_CONVERT_EXPR
3438 || code == BIT_FIELD_REF)
3439 op0 = TREE_OPERAND (op0, 0);
3441 orig_op0 = op0;
3442 op0 = vn_valueize (op0);
3443 if (TREE_CODE (op0) == SSA_NAME)
3445 if (VN_INFO (op0)->has_constants)
3446 op0 = vn_get_expr_for (op0);
3447 else if (CONVERT_EXPR_CODE_P (code)
3448 || code == REALPART_EXPR
3449 || code == IMAGPART_EXPR
3450 || code == VIEW_CONVERT_EXPR
3451 || code == BIT_FIELD_REF)
3453 /* We want to do tree-combining on conversion-like expressions.
3454 Make sure we feed only SSA_NAMEs or constants to fold though. */
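/* This lets e.g. a conversion whose operand is itself known to be a
   conversion, say x_1 = (int) y_2 with y_2 = (short) z_3, be folded
   across the two statements.  (Names are illustrative.)  */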
3455 tree tem = vn_get_expr_for (op0);
3456 if (UNARY_CLASS_P (tem)
3457 || BINARY_CLASS_P (tem)
3458 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3459 || TREE_CODE (tem) == SSA_NAME
3460 || TREE_CODE (tem) == CONSTRUCTOR
3461 || is_gimple_min_invariant (tem))
3462 op0 = tem;
3466 /* Avoid folding if nothing changed, but remember the expression. */
3467 if (op0 == orig_op0)
3468 return NULL_TREE;
3470 if (code == BIT_FIELD_REF)
3472 tree rhs = gimple_assign_rhs1 (stmt);
3473 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3474 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3476 else
3477 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3478 if (result)
3480 STRIP_USELESS_TYPE_CONVERSION (result);
3481 if (valid_gimple_rhs_p (result))
3482 return result;
3485 return NULL_TREE;
3488 /* Try to simplify RHS using equivalences and constant folding. */
3490 static tree
3491 try_to_simplify (gassign *stmt)
3493 enum tree_code code = gimple_assign_rhs_code (stmt);
3494 tree tem;
3496 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3497 in this case, there is no point in doing extra work. */
3498 if (code == SSA_NAME)
3499 return NULL_TREE;
3501 /* First try constant folding based on our current lattice. */
3502 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3503 if (tem
3504 && (TREE_CODE (tem) == SSA_NAME
3505 || is_gimple_min_invariant (tem)))
3506 return tem;
3508 /* If that didn't work try combining multiple statements. */
3509 switch (TREE_CODE_CLASS (code))
3511 case tcc_reference:
3512 /* Fallthrough for some unary codes that can operate on registers. */
3513 if (!(code == REALPART_EXPR
3514 || code == IMAGPART_EXPR
3515 || code == VIEW_CONVERT_EXPR
3516 || code == BIT_FIELD_REF))
3517 break;
3518 /* We could do a little more with unary ops, if they expand
3519 into binary ops, but it's debatable whether it is worth it. */
3520 case tcc_unary:
3521 return simplify_unary_expression (stmt);
3523 case tcc_comparison:
3524 case tcc_binary:
3525 return simplify_binary_expression (stmt);
3527 default:
3528 break;
3531 return NULL_TREE;
3534 /* Visit and value number USE, return true if the value number
3535 changed. */
3537 static bool
3538 visit_use (tree use)
3540 bool changed = false;
3541 gimple stmt = SSA_NAME_DEF_STMT (use);
3543 mark_use_processed (use);
3545 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3546 if (dump_file && (dump_flags & TDF_DETAILS)
3547 && !SSA_NAME_IS_DEFAULT_DEF (use))
3549 fprintf (dump_file, "Value numbering ");
3550 print_generic_expr (dump_file, use, 0);
3551 fprintf (dump_file, " stmt = ");
3552 print_gimple_stmt (dump_file, stmt, 0, 0);
3555 /* Handle uninitialized uses. */
3556 if (SSA_NAME_IS_DEFAULT_DEF (use))
3557 changed = set_ssa_val_to (use, use);
3558 else
3560 if (gimple_code (stmt) == GIMPLE_PHI)
3561 changed = visit_phi (stmt);
3562 else if (gimple_has_volatile_ops (stmt))
3563 changed = defs_to_varying (stmt);
3564 else if (is_gimple_assign (stmt))
3566 enum tree_code code = gimple_assign_rhs_code (stmt);
3567 tree lhs = gimple_assign_lhs (stmt);
3568 tree rhs1 = gimple_assign_rhs1 (stmt);
3569 tree simplified;
3571 /* Shortcut for copies. Simplifying copies is pointless,
3572 since we copy the expression and value they represent. */
3573 if (code == SSA_NAME
3574 && TREE_CODE (lhs) == SSA_NAME)
3576 changed = visit_copy (lhs, rhs1);
3577 goto done;
3579 simplified = try_to_simplify (as_a <gassign *> (stmt));
3580 if (simplified)
3582 if (dump_file && (dump_flags & TDF_DETAILS))
3584 fprintf (dump_file, "RHS ");
3585 print_gimple_expr (dump_file, stmt, 0, 0);
3586 fprintf (dump_file, " simplified to ");
3587 print_generic_expr (dump_file, simplified, 0);
3588 if (TREE_CODE (lhs) == SSA_NAME)
3589 fprintf (dump_file, " has constants %d\n",
3590 expr_has_constants (simplified));
3591 else
3592 fprintf (dump_file, "\n");
3595 /* Setting value numbers to constants will occasionally
3596 screw up phi congruence because constants are not
3597 uniquely associated with a single ssa name that can be
3598 looked up. */
3599 if (simplified
3600 && is_gimple_min_invariant (simplified)
3601 && TREE_CODE (lhs) == SSA_NAME)
3603 VN_INFO (lhs)->expr = simplified;
3604 VN_INFO (lhs)->has_constants = true;
3605 changed = set_ssa_val_to (lhs, simplified);
3606 goto done;
3608 else if (simplified
3609 && TREE_CODE (simplified) == SSA_NAME
3610 && TREE_CODE (lhs) == SSA_NAME)
3612 changed = visit_copy (lhs, simplified);
3613 goto done;
3615 else if (simplified)
3617 if (TREE_CODE (lhs) == SSA_NAME)
3619 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3620 /* We have to unshare the expression or else
3621 valueizing may change the IL stream. */
3622 VN_INFO (lhs)->expr = unshare_expr (simplified);
3625 else if (stmt_has_constants (stmt)
3626 && TREE_CODE (lhs) == SSA_NAME)
3627 VN_INFO (lhs)->has_constants = true;
3628 else if (TREE_CODE (lhs) == SSA_NAME)
3630 /* We reset expr and constantness here because we may
3631 have been value numbering optimistically, and
3632 iterating. They may become non-constant in this case,
3633 even if they were optimistically constant. */
3635 VN_INFO (lhs)->has_constants = false;
3636 VN_INFO (lhs)->expr = NULL_TREE;
3639 if ((TREE_CODE (lhs) == SSA_NAME
3640 /* We can substitute SSA_NAMEs that are live over
3641 abnormal edges with their constant value. */
3642 && !(gimple_assign_copy_p (stmt)
3643 && is_gimple_min_invariant (rhs1))
3644 && !(simplified
3645 && is_gimple_min_invariant (simplified))
3646 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3647 /* Stores or copies from SSA_NAMEs that are live over
3648 abnormal edges are a problem. */
3649 || (code == SSA_NAME
3650 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3651 changed = defs_to_varying (stmt);
3652 else if (REFERENCE_CLASS_P (lhs)
3653 || DECL_P (lhs))
3654 changed = visit_reference_op_store (lhs, rhs1, stmt);
3655 else if (TREE_CODE (lhs) == SSA_NAME)
3657 if ((gimple_assign_copy_p (stmt)
3658 && is_gimple_min_invariant (rhs1))
3659 || (simplified
3660 && is_gimple_min_invariant (simplified)))
3662 VN_INFO (lhs)->has_constants = true;
3663 if (simplified)
3664 changed = set_ssa_val_to (lhs, simplified);
3665 else
3666 changed = set_ssa_val_to (lhs, rhs1);
3668 else
3670 /* First try to lookup the simplified expression. */
3671 if (simplified)
3673 enum gimple_rhs_class rhs_class;
3676 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3677 if ((rhs_class == GIMPLE_UNARY_RHS
3678 || rhs_class == GIMPLE_BINARY_RHS
3679 || rhs_class == GIMPLE_TERNARY_RHS)
3680 && valid_gimple_rhs_p (simplified))
3682 tree result = vn_nary_op_lookup (simplified, NULL);
3683 if (result)
3685 changed = set_ssa_val_to (lhs, result);
3686 goto done;
3691 /* Otherwise visit the original statement. */
3692 switch (vn_get_stmt_kind (stmt))
3694 case VN_NARY:
3695 changed = visit_nary_op (lhs, stmt);
3696 break;
3697 case VN_REFERENCE:
3698 changed = visit_reference_op_load (lhs, rhs1, stmt);
3699 break;
3700 default:
3701 changed = defs_to_varying (stmt);
3702 break;
3706 else
3707 changed = defs_to_varying (stmt);
3709 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3711 tree lhs = gimple_call_lhs (stmt);
3712 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3714 /* Try constant folding based on our current lattice. */
3715 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3716 vn_valueize);
3717 if (simplified)
3719 if (dump_file && (dump_flags & TDF_DETAILS))
3721 fprintf (dump_file, "call ");
3722 print_gimple_expr (dump_file, stmt, 0, 0);
3723 fprintf (dump_file, " simplified to ");
3724 print_generic_expr (dump_file, simplified, 0);
3725 if (TREE_CODE (lhs) == SSA_NAME)
3726 fprintf (dump_file, " has constants %d\n",
3727 expr_has_constants (simplified));
3728 else
3729 fprintf (dump_file, "\n");
3732 /* Setting value numbers to constants will occasionally
3733 screw up phi congruence because constants are not
3734 uniquely associated with a single ssa name that can be
3735 looked up. */
3736 if (simplified
3737 && is_gimple_min_invariant (simplified))
3739 VN_INFO (lhs)->expr = simplified;
3740 VN_INFO (lhs)->has_constants = true;
3741 changed = set_ssa_val_to (lhs, simplified);
3742 if (gimple_vdef (stmt))
3743 changed |= set_ssa_val_to (gimple_vdef (stmt),
3744 SSA_VAL (gimple_vuse (stmt)));
3745 goto done;
3747 else if (simplified
3748 && TREE_CODE (simplified) == SSA_NAME)
3750 changed = visit_copy (lhs, simplified);
3751 if (gimple_vdef (stmt))
3752 changed |= set_ssa_val_to (gimple_vdef (stmt),
3753 SSA_VAL (gimple_vuse (stmt)));
3754 goto done;
3756 else
3758 if (stmt_has_constants (stmt))
3759 VN_INFO (lhs)->has_constants = true;
3760 else
3762 /* We reset expr and constantness here because we may
3763 have been value numbering optimistically, and
3764 iterating. They may become non-constant in this case,
3765 even if they were optimistically constant. */
3766 VN_INFO (lhs)->has_constants = false;
3767 VN_INFO (lhs)->expr = NULL_TREE;
3770 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3772 changed = defs_to_varying (stmt);
3773 goto done;
3778 if (!gimple_call_internal_p (stmt)
3779 && (/* Calls to the same function with the same vuse
3780 and the same operands do not necessarily return the same
3781 value, unless they're pure or const. */
3782 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3783 /* If calls have a vdef, subsequent calls won't have
3784 the same incoming vuse. So, if 2 calls with vdef have the
3785 same vuse, we know they're not subsequent.
3786 We can value number 2 calls to the same function with the
3787 same vuse and the same operands which are not subsequent
3788 the same, because there is no code in the program that can
3789 compare the 2 values... */
3790 || (gimple_vdef (stmt)
3791 /* ... unless the call returns a pointer which does
3792 not alias with anything else. In which case the
3793 information that the values are distinct is encoded
3794 in the IL. */
3795 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3796 /* Only perform the following when being called from PRE
3797 which embeds tail merging. */
3798 && default_vn_walk_kind == VN_WALK)))
3799 changed = visit_reference_op_call (lhs, call_stmt);
3800 else
3801 changed = defs_to_varying (stmt);
3803 else
3804 changed = defs_to_varying (stmt);
3806 done:
3807 return changed;
3810 /* Compare two operands by reverse postorder index. */
3812 static int
3813 compare_ops (const void *pa, const void *pb)
3815 const tree opa = *((const tree *)pa);
3816 const tree opb = *((const tree *)pb);
3817 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3818 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3819 basic_block bba;
3820 basic_block bbb;
3822 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3823 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3824 else if (gimple_nop_p (opstmta))
3825 return -1;
3826 else if (gimple_nop_p (opstmtb))
3827 return 1;
3829 bba = gimple_bb (opstmta);
3830 bbb = gimple_bb (opstmtb);
3832 if (!bba && !bbb)
3833 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3834 else if (!bba)
3835 return -1;
3836 else if (!bbb)
3837 return 1;
3839 if (bba == bbb)
3841 if (gimple_code (opstmta) == GIMPLE_PHI
3842 && gimple_code (opstmtb) == GIMPLE_PHI)
3843 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3844 else if (gimple_code (opstmta) == GIMPLE_PHI)
3845 return -1;
3846 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3847 return 1;
3848 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3849 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3850 else
3851 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3853 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3856 /* Sort an array containing members of a strongly connected component
3857 SCC so that the members are ordered by RPO number.
3858 This means that when the sort is complete, iterating through the
3859 array will give you the members in RPO order. */
3861 static void
3862 sort_scc (vec<tree> scc)
3864 scc.qsort (compare_ops);
3867 /* Insert the no longer used nary ONARY to the hash INFO. */
3869 static void
3870 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3872 size_t size = sizeof_vn_nary_op (onary->length);
3873 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3874 &info->nary_obstack);
3875 memcpy (nary, onary, size);
3876 vn_nary_op_insert_into (nary, info->nary, false);
3879 /* Insert the no longer used phi OPHI to the hash INFO. */
3881 static void
3882 copy_phi (vn_phi_t ophi, vn_tables_t info)
3884 vn_phi_t phi = info->phis_pool->allocate ();
3885 vn_phi_s **slot;
3886 memcpy (phi, ophi, sizeof (*phi));
3887 ophi->phiargs.create (0);
3888 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3889 gcc_assert (!*slot);
3890 *slot = phi;
3893 /* Insert the no longer used reference OREF to the hash INFO. */
3895 static void
3896 copy_reference (vn_reference_t oref, vn_tables_t info)
3898 vn_reference_t ref;
3899 vn_reference_s **slot;
3900 ref = info->references_pool->allocate ();
3901 memcpy (ref, oref, sizeof (*ref));
3902 oref->operands.create (0);
3903 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3904 if (*slot)
3905 free_reference (*slot);
3906 *slot = ref;
3909 /* Process a strongly connected component in the SSA graph. */
3911 static void
3912 process_scc (vec<tree> scc)
3914 tree var;
3915 unsigned int i;
3916 unsigned int iterations = 0;
3917 bool changed = true;
3918 vn_nary_op_iterator_type hin;
3919 vn_phi_iterator_type hip;
3920 vn_reference_iterator_type hir;
3921 vn_nary_op_t nary;
3922 vn_phi_t phi;
3923 vn_reference_t ref;
3925 /* If the SCC has a single member, just visit it. */
3926 if (scc.length () == 1)
3928 tree use = scc[0];
3929 if (VN_INFO (use)->use_processed)
3930 return;
3931 /* We need to make sure it doesn't form a cycle itself, which can
3932 happen for self-referential PHI nodes. In that case we would
3933 end up inserting an expression with VN_TOP operands into the
3934 valid table which makes us derive bogus equivalences later.
3935 The cheapest way to check this is to assume a cycle for all PHI nodes. */
3936 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3937 /* Fallthru to iteration. */ ;
3938 else
3940 visit_use (use);
3941 return;
3945 if (dump_file && (dump_flags & TDF_DETAILS))
3946 print_scc (dump_file, scc);
3948 /* Iterate over the SCC with the optimistic table until it stops
3949 changing. */
3950 current_info = optimistic_info;
3951 while (changed)
3953 changed = false;
3954 iterations++;
3955 if (dump_file && (dump_flags & TDF_DETAILS))
3956 fprintf (dump_file, "Starting iteration %d\n", iterations);
3957 /* As we are value-numbering optimistically we have to
3958 clear the expression tables and the simplified expressions
3959 in each iteration until we converge. */
3960 optimistic_info->nary->empty ();
3961 optimistic_info->phis->empty ();
3962 optimistic_info->references->empty ();
3963 obstack_free (&optimistic_info->nary_obstack, NULL);
3964 gcc_obstack_init (&optimistic_info->nary_obstack);
3965 optimistic_info->phis_pool->release ();
3966 optimistic_info->references_pool->release ();
3967 FOR_EACH_VEC_ELT (scc, i, var)
3968 VN_INFO (var)->expr = NULL_TREE;
3969 FOR_EACH_VEC_ELT (scc, i, var)
3970 changed |= visit_use (var);
3973 if (dump_file && (dump_flags & TDF_DETAILS))
3974 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3975 statistics_histogram_event (cfun, "SCC iterations", iterations);
3977 /* Finally, copy the contents of the no longer used optimistic
3978 table to the valid table. */
3979 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3980 copy_nary (nary, valid_info);
3981 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3982 copy_phi (phi, valid_info);
3983 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3984 ref, vn_reference_t, hir)
3985 copy_reference (ref, valid_info);
3987 current_info = valid_info;
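/* Illustrative sketch, not part of the pass: the optimistic iteration above
   in miniature.  A hypothetical revisit callback recomputes the value of one
   SCC member and reports whether it changed; the whole SCC is swept until a
   fixpoint is reached, after which the results would be committed, mirroring
   the copy into valid_info.  */

static void
example_iterate_scc (int *vals, int n, int (*revisit) (int *vals, int i))
{
  int changed = 1;
  while (changed)
    {
      changed = 0;
      for (int i = 0; i < n; ++i)
        changed |= revisit (vals, i);  /* Nonzero if vals[i] was updated.  */
    }
}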
3991 /* Pop the components of the found SCC for NAME off the SCC stack
3992 and process them. Returns true if all went well, false if
3993 we run into resource limits. */
3995 static bool
3996 extract_and_process_scc_for_name (tree name)
3998 auto_vec<tree> scc;
3999 tree x;
4001 /* Found an SCC, pop the components off the SCC stack and
4002 process them. */
4005 x = sccstack.pop ();
4007 VN_INFO (x)->on_sccstack = false;
4008 scc.safe_push (x);
4009 } while (x != name);
4011 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4012 if (scc.length ()
4013 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4015 if (dump_file)
4016 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4017 "SCC size %u exceeding %u\n", scc.length (),
4018 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4020 return false;
4023 if (scc.length () > 1)
4024 sort_scc (scc);
4026 process_scc (scc);
4028 return true;
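/* Illustrative note, not part of the pass: the limit tested above is exposed
   as --param sccvn-max-scc-size, so a user who hits the warning above can
   trade compile time and memory for completeness, e.g. (hypothetical value):

     gcc -O2 --param sccvn-max-scc-size=20000 foo.c  */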
4031 /* Depth first search on NAME to discover and process SCC's in the SSA
4032 graph.
4033 Execution of this algorithm relies on the fact that the SCC's are
4034 popped off the stack in topological order.
4035 Returns true if successful, false if we stopped processing SCC's due
4036 to resource constraints. */
4038 static bool
4039 DFS (tree name)
4041 vec<ssa_op_iter> itervec = vNULL;
4042 vec<tree> namevec = vNULL;
4043 use_operand_p usep = NULL;
4044 gimple defstmt;
4045 tree use;
4046 ssa_op_iter iter;
4048 start_over:
4049 /* SCC info */
4050 VN_INFO (name)->dfsnum = next_dfs_num++;
4051 VN_INFO (name)->visited = true;
4052 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4054 sccstack.safe_push (name);
4055 VN_INFO (name)->on_sccstack = true;
4056 defstmt = SSA_NAME_DEF_STMT (name);
4058 /* Recursively DFS on our operands, looking for SCC's. */
4059 if (!gimple_nop_p (defstmt))
4061 /* Push a new iterator. */
4062 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4063 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4064 else
4065 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4067 else
4068 clear_and_done_ssa_iter (&iter);
4070 while (1)
4072 /* If we are done processing uses of a name, go up the stack
4073 of iterators and process SCCs as we found them. */
4074 if (op_iter_done (&iter))
4076 /* See if we found an SCC. */
4077 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4078 if (!extract_and_process_scc_for_name (name))
4080 namevec.release ();
4081 itervec.release ();
4082 return false;
4085 /* Check if we are done. */
4086 if (namevec.is_empty ())
4088 namevec.release ();
4089 itervec.release ();
4090 return true;
4093 /* Restore the last use walker and continue walking there. */
4094 use = name;
4095 name = namevec.pop ();
4096 memcpy (&iter, &itervec.last (),
4097 sizeof (ssa_op_iter));
4098 itervec.pop ();
4099 goto continue_walking;
4102 use = USE_FROM_PTR (usep);
4104 /* Since we handle phi nodes, we will sometimes get
4105 invariants in the use expression. */
4106 if (TREE_CODE (use) == SSA_NAME)
4108 if (! (VN_INFO (use)->visited))
4110 /* Recurse by pushing the current use walking state on
4111 the stack and starting over. */
4112 itervec.safe_push (iter);
4113 namevec.safe_push (name);
4114 name = use;
4115 goto start_over;
4117 continue_walking:
4118 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4119 VN_INFO (use)->low);
4121 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4122 && VN_INFO (use)->on_sccstack)
4124 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4125 VN_INFO (name)->low);
4129 usep = op_iter_next_use (&iter);
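/* Illustrative sketch, not part of the pass: the classic recursive form of
   Tarjan's SCC algorithm that the iterative DFS above implements on the SSA
   use-def graph.  The graph here is a hypothetical adjacency matrix; the
   real code keeps explicit namevec/itervec stacks instead of recursing so
   that huge functions cannot overflow the call stack.  */

#define EXAMPLE_NVERTS 8

static int ex_succ[EXAMPLE_NVERTS][EXAMPLE_NVERTS]; /* Nonzero: edge v -> w.  */
static int ex_dfsnum[EXAMPLE_NVERTS], ex_low[EXAMPLE_NVERTS];
static int ex_visited[EXAMPLE_NVERTS], ex_on_stack[EXAMPLE_NVERTS];
static int ex_stack[EXAMPLE_NVERTS], ex_sp, ex_next_dfs = 1;

static void
example_tarjan (int v)
{
  ex_visited[v] = 1;
  ex_dfsnum[v] = ex_low[v] = ex_next_dfs++;
  ex_stack[ex_sp++] = v;
  ex_on_stack[v] = 1;

  for (int w = 0; w < EXAMPLE_NVERTS; ++w)
    {
      if (!ex_succ[v][w])
        continue;
      if (!ex_visited[w])
        {
          example_tarjan (w);
          if (ex_low[w] < ex_low[v])
            ex_low[v] = ex_low[w];
        }
      else if (ex_on_stack[w] && ex_dfsnum[w] < ex_low[v])
        ex_low[v] = ex_dfsnum[w];
    }

  /* V is the root of an SCC: pop its members off the stack, which is what
     extract_and_process_scc_for_name does above.  */
  if (ex_low[v] == ex_dfsnum[v])
    {
      int w;
      do
        {
          w = ex_stack[--ex_sp];
          ex_on_stack[w] = 0;
        }
      while (w != v);
    }
}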
4133 /* Allocate a value number table. */
4135 static void
4136 allocate_vn_table (vn_tables_t table)
4138 table->phis = new vn_phi_table_type (23);
4139 table->nary = new vn_nary_op_table_type (23);
4140 table->references = new vn_reference_table_type (23);
4142 gcc_obstack_init (&table->nary_obstack);
4143 table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
4144 table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
4145 30);
4148 /* Free a value number table. */
4150 static void
4151 free_vn_table (vn_tables_t table)
4153 delete table->phis;
4154 table->phis = NULL;
4155 delete table->nary;
4156 table->nary = NULL;
4157 delete table->references;
4158 table->references = NULL;
4159 obstack_free (&table->nary_obstack, NULL);
4160 delete table->phis_pool;
4161 delete table->references_pool;
4164 static void
4165 init_scc_vn (void)
4167 size_t i;
4168 int j;
4169 int *rpo_numbers_temp;
4171 calculate_dominance_info (CDI_DOMINATORS);
4172 sccstack.create (0);
4173 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4175 constant_value_ids = BITMAP_ALLOC (NULL);
4177 next_dfs_num = 1;
4178 next_value_id = 1;
4180 vn_ssa_aux_table.create (num_ssa_names + 1);
4181 /* The create call doesn't actually grow the vector to the right size;
4182 it just preallocates the space to do so. */
4183 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4184 gcc_obstack_init (&vn_ssa_aux_obstack);
4186 shared_lookup_phiargs.create (0);
4187 shared_lookup_references.create (0);
4188 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4189 rpo_numbers_temp =
4190 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4191 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4193 /* rpo_numbers_temp is in RPO order: rpo_numbers_temp[i] is the index of
4194 the i'th block in RPO order. We want to map each block index to its
4195 RPO number, so we invert this array into rpo_numbers. */
4196 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4197 rpo_numbers[rpo_numbers_temp[j]] = j;
4199 XDELETE (rpo_numbers_temp);
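/* Illustrative worked example, not part of the pass: with a hypothetical
   rpo_numbers_temp[] = { 2, 0, 3, 1 } (block 2 is first in RPO order, then
   block 0, ...), the loop above yields rpo_numbers[2] = 0, rpo_numbers[0] = 1,
   rpo_numbers[3] = 2 and rpo_numbers[1] = 3, i.e. each block index now maps
   to its RPO position.  */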
4201 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4203 /* Create the VN_INFO structures, and initialize value numbers to
4204 TOP. */
4205 for (i = 0; i < num_ssa_names; i++)
4207 tree name = ssa_name (i);
4208 if (name)
4210 VN_INFO_GET (name)->valnum = VN_TOP;
4211 VN_INFO (name)->expr = NULL_TREE;
4212 VN_INFO (name)->value_id = 0;
4216 renumber_gimple_stmt_uids ();
4218 /* Create the valid and optimistic value numbering tables. */
4219 valid_info = XCNEW (struct vn_tables_s);
4220 allocate_vn_table (valid_info);
4221 optimistic_info = XCNEW (struct vn_tables_s);
4222 allocate_vn_table (optimistic_info);
4225 void
4226 free_scc_vn (void)
4228 size_t i;
4230 delete constant_to_value_id;
4231 constant_to_value_id = NULL;
4232 BITMAP_FREE (constant_value_ids);
4233 shared_lookup_phiargs.release ();
4234 shared_lookup_references.release ();
4235 XDELETEVEC (rpo_numbers);
4237 for (i = 0; i < num_ssa_names; i++)
4239 tree name = ssa_name (i);
4240 if (name
4241 && VN_INFO (name)->needs_insertion)
4242 release_ssa_name (name);
4244 obstack_free (&vn_ssa_aux_obstack, NULL);
4245 vn_ssa_aux_table.release ();
4247 sccstack.release ();
4248 free_vn_table (valid_info);
4249 XDELETE (valid_info);
4250 free_vn_table (optimistic_info);
4251 XDELETE (optimistic_info);
4254 /* Set *ID according to RESULT. */
4256 static void
4257 set_value_id_for_result (tree result, unsigned int *id)
4259 if (result && TREE_CODE (result) == SSA_NAME)
4260 *id = VN_INFO (result)->value_id;
4261 else if (result && is_gimple_min_invariant (result))
4262 *id = get_or_alloc_constant_value_id (result);
4263 else
4264 *id = get_next_value_id ();
4267 /* Set the value ids in the valid hash tables. */
4269 static void
4270 set_hashtable_value_ids (void)
4272 vn_nary_op_iterator_type hin;
4273 vn_phi_iterator_type hip;
4274 vn_reference_iterator_type hir;
4275 vn_nary_op_t vno;
4276 vn_reference_t vr;
4277 vn_phi_t vp;
4279 /* Now set the value ids of the things we had put in the hash
4280 table. */
4282 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4283 set_value_id_for_result (vno->result, &vno->value_id);
4285 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4286 set_value_id_for_result (vp->result, &vp->value_id);
4288 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4289 hir)
4290 set_value_id_for_result (vr->result, &vr->value_id);
4293 class cond_dom_walker : public dom_walker
4295 public:
4296 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4298 virtual void before_dom_children (basic_block);
4300 bool fail;
4303 void
4304 cond_dom_walker::before_dom_children (basic_block bb)
4306 edge e;
4307 edge_iterator ei;
4309 if (fail)
4310 return;
4312 /* If any of the predecessor edges that do not come from blocks dominated
4313 by us are still marked as possibly executable, consider this block
4314 reachable. */
4315 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4316 FOR_EACH_EDGE (e, ei, bb->preds)
4317 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4318 reachable |= (e->flags & EDGE_EXECUTABLE);
4320 /* If the block is not reachable all outgoing edges are not
4321 executable. */
4322 if (!reachable)
4324 if (dump_file && (dump_flags & TDF_DETAILS))
4325 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4326 "BB %d as not executable\n", bb->index);
4328 FOR_EACH_EDGE (e, ei, bb->succs)
4329 e->flags &= ~EDGE_EXECUTABLE;
4330 return;
4333 gimple stmt = last_stmt (bb);
4334 if (!stmt)
4335 return;
4337 enum gimple_code code = gimple_code (stmt);
4338 if (code != GIMPLE_COND
4339 && code != GIMPLE_SWITCH
4340 && code != GIMPLE_GOTO)
4341 return;
4343 if (dump_file && (dump_flags & TDF_DETAILS))
4345 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4346 bb->index);
4347 print_gimple_stmt (dump_file, stmt, 0, 0);
4350 /* Value-number the last stmt's SSA uses. */
4351 ssa_op_iter i;
4352 tree op;
4353 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4354 if (VN_INFO (op)->visited == false
4355 && !DFS (op))
4357 fail = true;
4358 return;
4361 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4362 if value-numbering can prove they are not reachable. Handling
4363 computed gotos is also possible. */
4364 tree val;
4365 switch (code)
4367 case GIMPLE_COND:
4369 tree lhs = gimple_cond_lhs (stmt);
4370 tree rhs = gimple_cond_rhs (stmt);
4371 /* Work hard at computing the condition, taking into account
4372 the valueization of the defining stmt. */
4373 if (TREE_CODE (lhs) == SSA_NAME)
4374 lhs = vn_get_expr_for (lhs);
4375 if (TREE_CODE (rhs) == SSA_NAME)
4376 rhs = vn_get_expr_for (rhs);
4377 val = fold_binary (gimple_cond_code (stmt),
4378 boolean_type_node, lhs, rhs);
4379 break;
4381 case GIMPLE_SWITCH:
4382 val = gimple_switch_index (as_a <gswitch *> (stmt));
4383 break;
4384 case GIMPLE_GOTO:
4385 val = gimple_goto_dest (stmt);
4386 break;
4387 default:
4388 gcc_unreachable ();
4390 if (!val)
4391 return;
4393 edge taken = find_taken_edge (bb, vn_valueize (val));
4394 if (!taken)
4395 return;
4397 if (dump_file && (dump_flags & TDF_DETAILS))
4398 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4399 "not executable\n", bb->index, bb->index, taken->dest->index);
4401 FOR_EACH_EDGE (e, ei, bb->succs)
4402 if (e != taken)
4403 e->flags &= ~EDGE_EXECUTABLE;
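/* Illustrative sketch, not part of the pass: the edge-pruning idea in
   miniature for a hypothetical two-way branch.  Once value numbering shows
   the controlling condition is a known constant, only the taken successor
   stays executable; the other is cleared, just as the FOR_EACH_EDGE loop
   above clears EDGE_EXECUTABLE on every non-taken edge.  */

static void
example_prune_branch (int cond_known, int cond_value,
                      int *then_executable, int *else_executable)
{
  if (!cond_known)
    return;                     /* Condition unknown; keep both edges.  */
  if (cond_value)
    *else_executable = 0;       /* Only the THEN edge can be taken.  */
  else
    *then_executable = 0;       /* Only the ELSE edge can be taken.  */
}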
4406 /* Do SCCVN. Returns true if it finished, false if we bailed out
4407 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4408 how we use the alias oracle walking during the VN process. */
4410 bool
4411 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4413 basic_block bb;
4414 size_t i;
4415 tree param;
4417 default_vn_walk_kind = default_vn_walk_kind_;
4419 init_scc_vn ();
4420 current_info = valid_info;
4422 for (param = DECL_ARGUMENTS (current_function_decl);
4423 param;
4424 param = DECL_CHAIN (param))
4426 tree def = ssa_default_def (cfun, param);
4427 if (def)
4429 VN_INFO (def)->visited = true;
4430 VN_INFO (def)->valnum = def;
4434 /* Mark all edges as possibly executable. */
4435 FOR_ALL_BB_FN (bb, cfun)
4437 edge_iterator ei;
4438 edge e;
4439 FOR_EACH_EDGE (e, ei, bb->succs)
4440 e->flags |= EDGE_EXECUTABLE;
4443 /* Walk all blocks in dominator order, value-numbering the last stmt's
4444 SSA uses and deciding whether outgoing edges are not executable. */
4445 cond_dom_walker walker;
4446 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4447 if (walker.fail)
4449 free_scc_vn ();
4450 return false;
4453 /* Value-number remaining SSA names. */
4454 for (i = 1; i < num_ssa_names; ++i)
4456 tree name = ssa_name (i);
4457 if (name
4458 && VN_INFO (name)->visited == false
4459 && !has_zero_uses (name))
4460 if (!DFS (name))
4462 free_scc_vn ();
4463 return false;
4467 /* Initialize the value ids. */
4469 for (i = 1; i < num_ssa_names; ++i)
4471 tree name = ssa_name (i);
4472 vn_ssa_aux_t info;
4473 if (!name)
4474 continue;
4475 info = VN_INFO (name);
4476 if (info->valnum == name
4477 || info->valnum == VN_TOP)
4478 info->value_id = get_next_value_id ();
4479 else if (is_gimple_min_invariant (info->valnum))
4480 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4483 /* Propagate. */
4484 for (i = 1; i < num_ssa_names; ++i)
4486 tree name = ssa_name (i);
4487 vn_ssa_aux_t info;
4488 if (!name)
4489 continue;
4490 info = VN_INFO (name);
4491 if (TREE_CODE (info->valnum) == SSA_NAME
4492 && info->valnum != name
4493 && info->value_id != VN_INFO (info->valnum)->value_id)
4494 info->value_id = VN_INFO (info->valnum)->value_id;
4497 set_hashtable_value_ids ();
4499 if (dump_file && (dump_flags & TDF_DETAILS))
4501 fprintf (dump_file, "Value numbers:\n");
4502 for (i = 0; i < num_ssa_names; i++)
4504 tree name = ssa_name (i);
4505 if (name
4506 && VN_INFO (name)->visited
4507 && SSA_VAL (name) != name)
4509 print_generic_expr (dump_file, name, 0);
4510 fprintf (dump_file, " = ");
4511 print_generic_expr (dump_file, SSA_VAL (name), 0);
4512 fprintf (dump_file, "\n");
4517 return true;
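/* Illustrative sketch, not part of the pass: the life cycle a hypothetical
   client pass would follow, assuming it only consults the SSA_VAL lattice.
   On failure run_scc_vn has already released its tables (see above), so only
   the successful path calls free_scc_vn.  */

static unsigned int
example_client_pass (void)
{
  if (!run_scc_vn (VN_WALK))
    return 0;                   /* Bailed out due to resource limits.  */

  /* ... consult SSA_VAL / the valid tables here ...  */

  free_scc_vn ();
  return 0;
}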
4520 /* Return the maximum value id we have ever seen. */
4522 unsigned int
4523 get_max_value_id (void)
4525 return next_value_id;
4528 /* Return the next unique value id. */
4530 unsigned int
4531 get_next_value_id (void)
4533 return next_value_id++;
4537 /* Compare two expressions E1 and E2 and return true if they are equal. */
4539 bool
4540 expressions_equal_p (tree e1, tree e2)
4542 /* The obvious case. */
4543 if (e1 == e2)
4544 return true;
4546 /* If only one of them is null, they cannot be equal. */
4547 if (!e1 || !e2)
4548 return false;
4550 /* Now perform the actual comparison. */
4551 if (TREE_CODE (e1) == TREE_CODE (e2)
4552 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4553 return true;
4555 return false;
4559 /* Return true if the nary operation NARY may trap. This is a copy
4560 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4562 bool
4563 vn_nary_may_trap (vn_nary_op_t nary)
4565 tree type;
4566 tree rhs2 = NULL_TREE;
4567 bool honor_nans = false;
4568 bool honor_snans = false;
4569 bool fp_operation = false;
4570 bool honor_trapv = false;
4571 bool handled, ret;
4572 unsigned i;
4574 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4575 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4576 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4578 type = nary->type;
4579 fp_operation = FLOAT_TYPE_P (type);
4580 if (fp_operation)
4582 honor_nans = flag_trapping_math && !flag_finite_math_only;
4583 honor_snans = flag_signaling_nans != 0;
4585 else if (INTEGRAL_TYPE_P (type)
4586 && TYPE_OVERFLOW_TRAPS (type))
4587 honor_trapv = true;
4589 if (nary->length >= 2)
4590 rhs2 = nary->op[1];
4591 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4592 honor_trapv,
4593 honor_nans, honor_snans, rhs2,
4594 &handled);
4595 if (handled
4596 && ret)
4597 return true;
4599 for (i = 0; i < nary->length; ++i)
4600 if (tree_could_trap_p (nary->op[i]))
4601 return true;
4603 return false;