[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "predict.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "gimple-pretty-print.h"
45 #include "tree-inline.h"
46 #include "hash-table.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-fold.h"
50 #include "tree-eh.h"
51 #include "gimple-expr.h"
52 #include "is-a.h"
53 #include "gimple.h"
54 #include "gimplify.h"
55 #include "gimple-ssa.h"
56 #include "tree-phinodes.h"
57 #include "ssa-iterators.h"
58 #include "stringpool.h"
59 #include "tree-ssanames.h"
60 #include "hashtab.h"
61 #include "rtl.h"
62 #include "flags.h"
63 #include "statistics.h"
64 #include "real.h"
65 #include "fixed-value.h"
66 #include "insn-config.h"
67 #include "expmed.h"
68 #include "dojump.h"
69 #include "explow.h"
70 #include "calls.h"
71 #include "emit-rtl.h"
72 #include "varasm.h"
73 #include "stmt.h"
74 #include "expr.h"
75 #include "tree-dfa.h"
76 #include "tree-ssa.h"
77 #include "dumpfile.h"
78 #include "alloc-pool.h"
79 #include "cfgloop.h"
80 #include "params.h"
81 #include "tree-ssa-propagate.h"
82 #include "tree-ssa-sccvn.h"
83 #include "tree-cfg.h"
84 #include "domwalk.h"
85 #include "ipa-ref.h"
86 #include "plugin-api.h"
87 #include "cgraph.h"
89 /* This algorithm is based on the SCC algorithm presented by Keith
90 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
91 (http://citeseer.ist.psu.edu/41805.html). In
92 straight-line code, it is equivalent to a regular hash-based value
93 numbering performed in reverse postorder.
95 For code with cycles, there are two alternatives, both of which
96 require keeping the hashtables separate from the actual list of
97 value numbers for SSA names.
99 1. Iterate value numbering in an RPO walk of the blocks, removing
100 all the entries from the hashtable after each iteration (but
101 keeping the SSA name->value number mapping between iterations).
102 Iterate until it does not change.
104 2. Perform value numbering as part of an SCC walk on the SSA graph,
105 iterating only the cycles in the SSA graph until they do not change
106 (using a separate, optimistic hashtable for value numbering the SCC
107 operands).
109 The second is not just faster in practice (because most SSA graph
110 cycles do not involve all the variables in the graph), it also has
111 some nice properties.
113 One of these nice properties is that when we pop an SCC off the
114 stack, we are guaranteed to have processed all the operands coming from
115 *outside of that SCC*, so we do not need to do anything special to
116 ensure they have value numbers.
118 Another nice property is that the SCC walk is done as part of a DFS
119 of the SSA graph, which makes it easy to perform combining and
120 simplifying operations at the same time.
122 The code below is deliberately written in a way that makes it easy
123 to separate the SCC walk from the other work it does.
125 In order to propagate constants through the code, we track which
126 expressions contain constants, and use those while folding. In
127 theory, we could also track expressions whose value numbers are
128 replaced, in case we end up folding based on expression
129 identities.
131 In order to value number memory, we assign value numbers to vuses.
132 This enables us to note that, for example, stores of the same
133 value to the same address from the same starting memory state are
134 equivalent.
135 TODO:
137 1. We can iterate only the changing portions of the SCCs, but
138 I have not seen an SCC big enough for this to be a win.
139 2. If you differentiate between phi nodes for loops and phi nodes
140 for if-then-else, you can properly consider phi nodes in different
141 blocks for equivalence.
142 3. We could value number vuses in more cases, particularly, whole
143 structure copies.
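/* Editorial sketch, not part of the original source: the shape of the
   SCC iteration described above.  It refers to the valid_info /
   optimistic_info / current_info tables and the visit_use () helper
   that are declared or defined later in this file; the real iteration
   lives in process_scc (), which additionally sorts the SCC and clears
   the optimistic tables between iterations.  */

static void
process_scc_sketch (vec<tree> scc)
{
  unsigned i;
  tree name;

  /* A singleton SCC cannot change on iteration; value number it once
     against the valid tables.  */
  if (scc.length () == 1)
    {
      current_info = valid_info;
      visit_use (scc[0]);
      return;
    }

  /* For a real cycle, iterate optimistically until no value number
     changes any more ...  */
  bool changed = true;
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      FOR_EACH_VEC_ELT (scc, i, name)
	changed |= visit_use (name);
    }

  /* ... and then record the stabilized value numbers in the valid
     tables.  */
  current_info = valid_info;
  FOR_EACH_VEC_ELT (scc, i, name)
    visit_use (name);
}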
147 /* vn_nary_op hashtable helpers. */
149 struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
151 typedef vn_nary_op_s value_type;
152 typedef vn_nary_op_s compare_type;
153 static inline hashval_t hash (const value_type *);
154 static inline bool equal (const value_type *, const compare_type *);
157 /* Return the computed hashcode for nary operation P1. */
159 inline hashval_t
160 vn_nary_op_hasher::hash (const value_type *vno1)
162 return vno1->hashcode;
165 /* Compare nary operations P1 and P2 and return true if they are
166 equivalent. */
168 inline bool
169 vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
171 return vn_nary_op_eq (vno1, vno2);
174 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
175 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
178 /* vn_phi hashtable helpers. */
180 static int
181 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
183 struct vn_phi_hasher
185 typedef vn_phi_s value_type;
186 typedef vn_phi_s compare_type;
187 static inline hashval_t hash (const value_type *);
188 static inline bool equal (const value_type *, const compare_type *);
189 static inline void remove (value_type *);
192 /* Return the computed hashcode for phi operation P1. */
194 inline hashval_t
195 vn_phi_hasher::hash (const value_type *vp1)
197 return vp1->hashcode;
200 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
202 inline bool
203 vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
205 return vn_phi_eq (vp1, vp2);
208 /* Free a phi operation structure VP. */
210 inline void
211 vn_phi_hasher::remove (value_type *phi)
213 phi->phiargs.release ();
216 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
217 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
220 /* Compare two reference operands P1 and P2 for equality. Return true if
221 they are equal, and false otherwise. */
223 static int
224 vn_reference_op_eq (const void *p1, const void *p2)
226 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
227 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
229 return (vro1->opcode == vro2->opcode
230 /* We do not care for differences in type qualification. */
231 && (vro1->type == vro2->type
232 || (vro1->type && vro2->type
233 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
234 TYPE_MAIN_VARIANT (vro2->type))))
235 && expressions_equal_p (vro1->op0, vro2->op0)
236 && expressions_equal_p (vro1->op1, vro2->op1)
237 && expressions_equal_p (vro1->op2, vro2->op2));
240 /* Free a reference operation structure VP. */
242 static inline void
243 free_reference (vn_reference_s *vr)
245 vr->operands.release ();
249 /* vn_reference hashtable helpers. */
251 struct vn_reference_hasher
253 typedef vn_reference_s value_type;
254 typedef vn_reference_s compare_type;
255 static inline hashval_t hash (const value_type *);
256 static inline bool equal (const value_type *, const compare_type *);
257 static inline void remove (value_type *);
260 /* Return the hashcode for a given reference operation P1. */
262 inline hashval_t
263 vn_reference_hasher::hash (const value_type *vr1)
265 return vr1->hashcode;
268 inline bool
269 vn_reference_hasher::equal (const value_type *v, const compare_type *c)
271 return vn_reference_eq (v, c);
274 inline void
275 vn_reference_hasher::remove (value_type *v)
277 free_reference (v);
280 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
281 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
284 /* The set of hashtables and alloc_pool's for their items. */
286 typedef struct vn_tables_s
288 vn_nary_op_table_type *nary;
289 vn_phi_table_type *phis;
290 vn_reference_table_type *references;
291 struct obstack nary_obstack;
292 alloc_pool phis_pool;
293 alloc_pool references_pool;
294 } *vn_tables_t;
297 /* vn_constant hashtable helpers. */
299 struct vn_constant_hasher : typed_free_remove <vn_constant_s>
301 typedef vn_constant_s value_type;
302 typedef vn_constant_s compare_type;
303 static inline hashval_t hash (const value_type *);
304 static inline bool equal (const value_type *, const compare_type *);
307 /* Hash table hash function for vn_constant_t. */
309 inline hashval_t
310 vn_constant_hasher::hash (const value_type *vc1)
312 return vc1->hashcode;
315 /* Hash table equality function for vn_constant_t. */
317 inline bool
318 vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
320 if (vc1->hashcode != vc2->hashcode)
321 return false;
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
326 static hash_table<vn_constant_hasher> *constant_to_value_id;
327 static bitmap constant_value_ids;
330 /* Valid hashtables storing information we have proven to be
331 correct. */
333 static vn_tables_t valid_info;
335 /* Optimistic hashtables storing information we are making assumptions about
336 during iterations. */
338 static vn_tables_t optimistic_info;
340 /* Pointer to the set of hashtables that is currently being used.
341 Should always point to either the optimistic_info, or the
342 valid_info. */
344 static vn_tables_t current_info;
347 /* Reverse post order index for each basic block. */
349 static int *rpo_numbers;
351 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
353 /* Return the SSA value of the VUSE x, supporting released VDEFs
354 during elimination which will value-number the VDEF to the
355 associated VUSE (but not substitute in the whole lattice). */
357 static inline tree
358 vuse_ssa_val (tree x)
360 if (!x)
361 return NULL_TREE;
365 x = SSA_VAL (x);
367 while (SSA_NAME_IN_FREE_LIST (x));
369 return x;
372 /* This represents the top of the VN lattice, which is the universal
373 value. */
375 tree VN_TOP;
377 /* Unique counter for our value ids. */
379 static unsigned int next_value_id;
381 /* Next DFS number and the stack for strongly connected component
382 detection. */
384 static unsigned int next_dfs_num;
385 static vec<tree> sccstack;
389 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
390 are allocated on an obstack for locality reasons, and to free them
391 without looping over the vec. */
393 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
394 static struct obstack vn_ssa_aux_obstack;
396 /* Return the value numbering information for a given SSA name. */
398 vn_ssa_aux_t
399 VN_INFO (tree name)
401 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
402 gcc_checking_assert (res);
403 return res;
406 /* Set the value numbering info for a given SSA name to a given
407 value. */
409 static inline void
410 VN_INFO_SET (tree name, vn_ssa_aux_t value)
412 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
415 /* Initialize the value numbering info for a given SSA name.
416 This should be called just once for every SSA name. */
418 vn_ssa_aux_t
419 VN_INFO_GET (tree name)
421 vn_ssa_aux_t newinfo;
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Get the representative expression for the SSA_NAME NAME. Returns
433 the representative SSA_NAME if there is no expression associated with it. */
435 tree
436 vn_get_expr_for (tree name)
438 vn_ssa_aux_t vn = VN_INFO (name);
439 if (vn->valnum == VN_TOP)
440 return name;
442 /* If the value-number is a constant it is the representative
443 expression. */
444 if (TREE_CODE (vn->valnum) != SSA_NAME)
445 return vn->valnum;
447 /* Get to the information of the value of this SSA_NAME. */
448 vn = VN_INFO (vn->valnum);
450 /* If the value-number is a constant it is the representative
451 expression. */
452 if (TREE_CODE (vn->valnum) != SSA_NAME)
453 return vn->valnum;
455 /* Else if we have an expression, return it. */
456 if (vn->expr != NULL_TREE)
457 return vn->expr;
459 /* If not, return the value-number. */
460 return vn->valnum;
463 /* Return the vn_kind the expression computed by the stmt should be
464 associated with. */
466 enum vn_kind
467 vn_get_stmt_kind (gimple stmt)
469 switch (gimple_code (stmt))
471 case GIMPLE_CALL:
472 return VN_REFERENCE;
473 case GIMPLE_PHI:
474 return VN_PHI;
475 case GIMPLE_ASSIGN:
477 enum tree_code code = gimple_assign_rhs_code (stmt);
478 tree rhs1 = gimple_assign_rhs1 (stmt);
479 switch (get_gimple_rhs_class (code))
481 case GIMPLE_UNARY_RHS:
482 case GIMPLE_BINARY_RHS:
483 case GIMPLE_TERNARY_RHS:
484 return VN_NARY;
485 case GIMPLE_SINGLE_RHS:
486 switch (TREE_CODE_CLASS (code))
488 case tcc_reference:
489 /* VOP-less references can go through unary case. */
490 if ((code == REALPART_EXPR
491 || code == IMAGPART_EXPR
492 || code == VIEW_CONVERT_EXPR
493 || code == BIT_FIELD_REF)
494 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
495 return VN_NARY;
497 /* Fallthrough. */
498 case tcc_declaration:
499 return VN_REFERENCE;
501 case tcc_constant:
502 return VN_CONSTANT;
504 default:
505 if (code == ADDR_EXPR)
506 return (is_gimple_min_invariant (rhs1)
507 ? VN_CONSTANT : VN_REFERENCE);
508 else if (code == CONSTRUCTOR)
509 return VN_NARY;
510 return VN_NONE;
512 default:
513 return VN_NONE;
516 default:
517 return VN_NONE;
521 /* Lookup a value id for CONSTANT and return it. If it does not
522 exist, return 0. */
524 unsigned int
525 get_constant_value_id (tree constant)
527 vn_constant_s **slot;
528 struct vn_constant_s vc;
530 vc.hashcode = vn_hash_constant_with_type (constant);
531 vc.constant = constant;
532 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
533 if (slot)
534 return (*slot)->value_id;
535 return 0;
538 /* Lookup a value id for CONSTANT, and if it does not exist, create a
539 new one and return it. If it does exist, return it. */
541 unsigned int
542 get_or_alloc_constant_value_id (tree constant)
544 vn_constant_s **slot;
545 struct vn_constant_s vc;
546 vn_constant_t vcp;
548 vc.hashcode = vn_hash_constant_with_type (constant);
549 vc.constant = constant;
550 slot = constant_to_value_id->find_slot (&vc, INSERT);
551 if (*slot)
552 return (*slot)->value_id;
554 vcp = XNEW (struct vn_constant_s);
555 vcp->hashcode = vc.hashcode;
556 vcp->constant = constant;
557 vcp->value_id = get_next_value_id ();
558 *slot = vcp;
559 bitmap_set_bit (constant_value_ids, vcp->value_id);
560 return vcp->value_id;
563 /* Return true if V is a value id for a constant. */
565 bool
566 value_id_constant_p (unsigned int v)
568 return bitmap_bit_p (constant_value_ids, v);
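/* Editorial sketch, not part of the original source: how the constant
   value-id API above fits together.  It assumes init_scc_vn (later in
   this file) has already allocated constant_to_value_id and
   constant_value_ids.  */

static void ATTRIBUTE_UNUSED
constant_value_id_sketch (void)
{
  /* The first request allocates a fresh value id and records it in the
     constant bitmap; subsequent lookups return the same id.  */
  unsigned int vid = get_or_alloc_constant_value_id (integer_zero_node);
  gcc_checking_assert (value_id_constant_p (vid));
  gcc_checking_assert (get_constant_value_id (integer_zero_node) == vid);
}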
571 /* Compute the hash for a reference operand VRO1. */
573 static void
574 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
576 hstate.add_int (vro1->opcode);
577 if (vro1->op0)
578 inchash::add_expr (vro1->op0, hstate);
579 if (vro1->op1)
580 inchash::add_expr (vro1->op1, hstate);
581 if (vro1->op2)
582 inchash::add_expr (vro1->op2, hstate);
585 /* Compute a hash for the reference operation VR1 and return it. */
587 static hashval_t
588 vn_reference_compute_hash (const vn_reference_t vr1)
590 inchash::hash hstate;
591 hashval_t result;
592 int i;
593 vn_reference_op_t vro;
594 HOST_WIDE_INT off = -1;
595 bool deref = false;
597 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
599 if (vro->opcode == MEM_REF)
600 deref = true;
601 else if (vro->opcode != ADDR_EXPR)
602 deref = false;
603 if (vro->off != -1)
605 if (off == -1)
606 off = 0;
607 off += vro->off;
609 else
611 if (off != -1
612 && off != 0)
613 hstate.add_int (off);
614 off = -1;
615 if (deref
616 && vro->opcode == ADDR_EXPR)
618 if (vro->op0)
620 tree op = TREE_OPERAND (vro->op0, 0);
621 hstate.add_int (TREE_CODE (op));
622 inchash::add_expr (op, hstate);
625 else
626 vn_reference_op_compute_hash (vro, hstate);
629 result = hstate.end ();
630 /* ??? We would ICE later if we hash instead of adding that in. */
631 if (vr1->vuse)
632 result += SSA_NAME_VERSION (vr1->vuse);
634 return result;
637 /* Return true if reference operations VR1 and VR2 are equivalent. This
638 means they have the same set of operands and vuses. */
640 bool
641 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
643 unsigned i, j;
645 /* Early out if this is not a hash collision. */
646 if (vr1->hashcode != vr2->hashcode)
647 return false;
649 /* The VOP needs to be the same. */
650 if (vr1->vuse != vr2->vuse)
651 return false;
653 /* If the operands are the same we are done. */
654 if (vr1->operands == vr2->operands)
655 return true;
657 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
658 return false;
660 if (INTEGRAL_TYPE_P (vr1->type)
661 && INTEGRAL_TYPE_P (vr2->type))
663 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
664 return false;
666 else if (INTEGRAL_TYPE_P (vr1->type)
667 && (TYPE_PRECISION (vr1->type)
668 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
669 return false;
670 else if (INTEGRAL_TYPE_P (vr2->type)
671 && (TYPE_PRECISION (vr2->type)
672 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
673 return false;
675 i = 0;
676 j = 0;
679 HOST_WIDE_INT off1 = 0, off2 = 0;
680 vn_reference_op_t vro1, vro2;
681 vn_reference_op_s tem1, tem2;
682 bool deref1 = false, deref2 = false;
683 for (; vr1->operands.iterate (i, &vro1); i++)
685 if (vro1->opcode == MEM_REF)
686 deref1 = true;
687 if (vro1->off == -1)
688 break;
689 off1 += vro1->off;
691 for (; vr2->operands.iterate (j, &vro2); j++)
693 if (vro2->opcode == MEM_REF)
694 deref2 = true;
695 if (vro2->off == -1)
696 break;
697 off2 += vro2->off;
699 if (off1 != off2)
700 return false;
701 if (deref1 && vro1->opcode == ADDR_EXPR)
703 memset (&tem1, 0, sizeof (tem1));
704 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
705 tem1.type = TREE_TYPE (tem1.op0);
706 tem1.opcode = TREE_CODE (tem1.op0);
707 vro1 = &tem1;
708 deref1 = false;
710 if (deref2 && vro2->opcode == ADDR_EXPR)
712 memset (&tem2, 0, sizeof (tem2));
713 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
714 tem2.type = TREE_TYPE (tem2.op0);
715 tem2.opcode = TREE_CODE (tem2.op0);
716 vro2 = &tem2;
717 deref2 = false;
719 if (deref1 != deref2)
720 return false;
721 if (!vn_reference_op_eq (vro1, vro2))
722 return false;
723 ++j;
724 ++i;
726 while (vr1->operands.length () != i
727 || vr2->operands.length () != j);
729 return true;
732 /* Copy the operations present in load/store REF into RESULT, a vector of
733 vn_reference_op_s's. */
735 static void
736 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
738 if (TREE_CODE (ref) == TARGET_MEM_REF)
740 vn_reference_op_s temp;
742 result->reserve (3);
744 memset (&temp, 0, sizeof (temp));
745 temp.type = TREE_TYPE (ref);
746 temp.opcode = TREE_CODE (ref);
747 temp.op0 = TMR_INDEX (ref);
748 temp.op1 = TMR_STEP (ref);
749 temp.op2 = TMR_OFFSET (ref);
750 temp.off = -1;
751 result->quick_push (temp);
753 memset (&temp, 0, sizeof (temp));
754 temp.type = NULL_TREE;
755 temp.opcode = ERROR_MARK;
756 temp.op0 = TMR_INDEX2 (ref);
757 temp.off = -1;
758 result->quick_push (temp);
760 memset (&temp, 0, sizeof (temp));
761 temp.type = NULL_TREE;
762 temp.opcode = TREE_CODE (TMR_BASE (ref));
763 temp.op0 = TMR_BASE (ref);
764 temp.off = -1;
765 result->quick_push (temp);
766 return;
769 /* For non-calls, store the information that makes up the address. */
770 tree orig = ref;
771 while (ref)
773 vn_reference_op_s temp;
775 memset (&temp, 0, sizeof (temp));
776 temp.type = TREE_TYPE (ref);
777 temp.opcode = TREE_CODE (ref);
778 temp.off = -1;
780 switch (temp.opcode)
782 case MODIFY_EXPR:
783 temp.op0 = TREE_OPERAND (ref, 1);
784 break;
785 case WITH_SIZE_EXPR:
786 temp.op0 = TREE_OPERAND (ref, 1);
787 temp.off = 0;
788 break;
789 case MEM_REF:
790 /* The base address gets its own vn_reference_op_s structure. */
791 temp.op0 = TREE_OPERAND (ref, 1);
792 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
793 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
794 break;
795 case BIT_FIELD_REF:
796 /* Record bits and position. */
797 temp.op0 = TREE_OPERAND (ref, 1);
798 temp.op1 = TREE_OPERAND (ref, 2);
799 break;
800 case COMPONENT_REF:
801 /* The field decl is enough to unambiguously specify the field,
802 a matching type is not necessary and a mismatching type
803 is always a spurious difference. */
804 temp.type = NULL_TREE;
805 temp.op0 = TREE_OPERAND (ref, 1);
806 temp.op1 = TREE_OPERAND (ref, 2);
808 tree this_offset = component_ref_field_offset (ref);
809 if (this_offset
810 && TREE_CODE (this_offset) == INTEGER_CST)
812 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
813 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
815 offset_int off
816 = (wi::to_offset (this_offset)
817 + wi::lrshift (wi::to_offset (bit_offset),
818 LOG2_BITS_PER_UNIT));
819 if (wi::fits_shwi_p (off)
820 /* Prohibit value-numbering zero offset components
821 of addresses the same before the pass folding
822 __builtin_object_size had a chance to run
823 (checking cfun->after_inlining does the
824 trick here). */
825 && (TREE_CODE (orig) != ADDR_EXPR
826 || off != 0
827 || cfun->after_inlining))
828 temp.off = off.to_shwi ();
832 break;
833 case ARRAY_RANGE_REF:
834 case ARRAY_REF:
835 /* Record index as operand. */
836 temp.op0 = TREE_OPERAND (ref, 1);
837 /* Always record lower bounds and element size. */
838 temp.op1 = array_ref_low_bound (ref);
839 temp.op2 = array_ref_element_size (ref);
840 if (TREE_CODE (temp.op0) == INTEGER_CST
841 && TREE_CODE (temp.op1) == INTEGER_CST
842 && TREE_CODE (temp.op2) == INTEGER_CST)
844 offset_int off = ((wi::to_offset (temp.op0)
845 - wi::to_offset (temp.op1))
846 * wi::to_offset (temp.op2));
847 if (wi::fits_shwi_p (off))
848 temp.off = off.to_shwi();
850 break;
851 case VAR_DECL:
852 if (DECL_HARD_REGISTER (ref))
854 temp.op0 = ref;
855 break;
857 /* Fallthru. */
858 case PARM_DECL:
859 case CONST_DECL:
860 case RESULT_DECL:
861 /* Canonicalize decls to MEM[&decl] which is what we end up with
862 when valueizing MEM[ptr] with ptr = &decl. */
863 temp.opcode = MEM_REF;
864 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
865 temp.off = 0;
866 result->safe_push (temp);
867 temp.opcode = ADDR_EXPR;
868 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
869 temp.type = TREE_TYPE (temp.op0);
870 temp.off = -1;
871 break;
872 case STRING_CST:
873 case INTEGER_CST:
874 case COMPLEX_CST:
875 case VECTOR_CST:
876 case REAL_CST:
877 case FIXED_CST:
878 case CONSTRUCTOR:
879 case SSA_NAME:
880 temp.op0 = ref;
881 break;
882 case ADDR_EXPR:
883 if (is_gimple_min_invariant (ref))
885 temp.op0 = ref;
886 break;
888 break;
889 /* These are only interesting for their operands, their
890 existence, and their type. They will never be the last
891 ref in the chain of references (i.e. they require an
892 operand), so we don't have to put anything
893 for op* as it will be handled by the iteration */
894 case REALPART_EXPR:
895 case VIEW_CONVERT_EXPR:
896 temp.off = 0;
897 break;
898 case IMAGPART_EXPR:
899 /* This is only interesting for its constant offset. */
900 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
901 break;
902 default:
903 gcc_unreachable ();
905 result->safe_push (temp);
907 if (REFERENCE_CLASS_P (ref)
908 || TREE_CODE (ref) == MODIFY_EXPR
909 || TREE_CODE (ref) == WITH_SIZE_EXPR
910 || (TREE_CODE (ref) == ADDR_EXPR
911 && !is_gimple_min_invariant (ref)))
912 ref = TREE_OPERAND (ref, 0);
913 else
914 ref = NULL_TREE;
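/* Editorial example, not part of the original source: for a reference
   such as a.f[i_2] with a plain VAR_DECL a, the loop above records,
   outermost first, roughly

     { ARRAY_REF,     op0 = i_2, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL for f }
     { MEM_REF,       op0 = (pointer constant) 0, off = 0 }
     { ADDR_EXPR,     op0 = &a, off = -1 }

   i.e. the decl itself is canonicalized to MEM[&a] as described in the
   VAR_DECL/PARM_DECL cases above.  */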
918 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
919 operands in *OPS, the reference alias set SET and the reference type TYPE.
920 Return true if something useful was produced. */
922 bool
923 ao_ref_init_from_vn_reference (ao_ref *ref,
924 alias_set_type set, tree type,
925 vec<vn_reference_op_s> ops)
927 vn_reference_op_t op;
928 unsigned i;
929 tree base = NULL_TREE;
930 tree *op0_p = &base;
931 HOST_WIDE_INT offset = 0;
932 HOST_WIDE_INT max_size;
933 HOST_WIDE_INT size = -1;
934 tree size_tree = NULL_TREE;
935 alias_set_type base_alias_set = -1;
937 /* First get the final access size from just the outermost expression. */
938 op = &ops[0];
939 if (op->opcode == COMPONENT_REF)
940 size_tree = DECL_SIZE (op->op0);
941 else if (op->opcode == BIT_FIELD_REF)
942 size_tree = op->op0;
943 else
945 machine_mode mode = TYPE_MODE (type);
946 if (mode == BLKmode)
947 size_tree = TYPE_SIZE (type);
948 else
949 size = GET_MODE_BITSIZE (mode);
951 if (size_tree != NULL_TREE)
953 if (!tree_fits_uhwi_p (size_tree))
954 size = -1;
955 else
956 size = tree_to_uhwi (size_tree);
959 /* Initially, maxsize is the same as the accessed element size.
960 In the following it will only grow (or become -1). */
961 max_size = size;
963 /* Compute cumulative bit-offset for nested component-refs and array-refs,
964 and find the ultimate containing object. */
965 FOR_EACH_VEC_ELT (ops, i, op)
967 switch (op->opcode)
969 /* These may be in the reference ops, but we cannot do anything
970 sensible with them here. */
971 case ADDR_EXPR:
972 /* Apart from ADDR_EXPR arguments to MEM_REF. */
973 if (base != NULL_TREE
974 && TREE_CODE (base) == MEM_REF
975 && op->op0
976 && DECL_P (TREE_OPERAND (op->op0, 0)))
978 vn_reference_op_t pop = &ops[i-1];
979 base = TREE_OPERAND (op->op0, 0);
980 if (pop->off == -1)
982 max_size = -1;
983 offset = 0;
985 else
986 offset += pop->off * BITS_PER_UNIT;
987 op0_p = NULL;
988 break;
990 /* Fallthru. */
991 case CALL_EXPR:
992 return false;
994 /* Record the base objects. */
995 case MEM_REF:
996 base_alias_set = get_deref_alias_set (op->op0);
997 *op0_p = build2 (MEM_REF, op->type,
998 NULL_TREE, op->op0);
999 op0_p = &TREE_OPERAND (*op0_p, 0);
1000 break;
1002 case VAR_DECL:
1003 case PARM_DECL:
1004 case RESULT_DECL:
1005 case SSA_NAME:
1006 *op0_p = op->op0;
1007 op0_p = NULL;
1008 break;
1010 /* And now the usual component-reference style ops. */
1011 case BIT_FIELD_REF:
1012 offset += tree_to_shwi (op->op1);
1013 break;
1015 case COMPONENT_REF:
1017 tree field = op->op0;
1018 /* We do not have a complete COMPONENT_REF tree here so we
1019 cannot use component_ref_field_offset. Do the interesting
1020 parts manually. */
1022 if (op->op1
1023 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1024 max_size = -1;
1025 else
1027 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1028 * BITS_PER_UNIT);
1029 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1031 break;
1034 case ARRAY_RANGE_REF:
1035 case ARRAY_REF:
1036 /* We recorded the lower bound and the element size. */
1037 if (!tree_fits_shwi_p (op->op0)
1038 || !tree_fits_shwi_p (op->op1)
1039 || !tree_fits_shwi_p (op->op2))
1040 max_size = -1;
1041 else
1043 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1044 hindex -= tree_to_shwi (op->op1);
1045 hindex *= tree_to_shwi (op->op2);
1046 hindex *= BITS_PER_UNIT;
1047 offset += hindex;
1049 break;
1051 case REALPART_EXPR:
1052 break;
1054 case IMAGPART_EXPR:
1055 offset += size;
1056 break;
1058 case VIEW_CONVERT_EXPR:
1059 break;
1061 case STRING_CST:
1062 case INTEGER_CST:
1063 case COMPLEX_CST:
1064 case VECTOR_CST:
1065 case REAL_CST:
1066 case CONSTRUCTOR:
1067 case CONST_DECL:
1068 return false;
1070 default:
1071 return false;
1075 if (base == NULL_TREE)
1076 return false;
1078 ref->ref = NULL_TREE;
1079 ref->base = base;
1080 ref->offset = offset;
1081 ref->size = size;
1082 ref->max_size = max_size;
1083 ref->ref_alias_set = set;
1084 if (base_alias_set != -1)
1085 ref->base_alias_set = base_alias_set;
1086 else
1087 ref->base_alias_set = get_alias_set (base);
1088 /* We discount volatiles from value-numbering elsewhere. */
1089 ref->volatile_p = false;
1091 return true;
1094 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1095 vn_reference_op_s's. */
1097 static void
1098 copy_reference_ops_from_call (gcall *call,
1099 vec<vn_reference_op_s> *result)
1101 vn_reference_op_s temp;
1102 unsigned i;
1103 tree lhs = gimple_call_lhs (call);
1104 int lr;
1106 /* If two calls have different non-SSA lhs, vdef value numbers should be
1107 different. By adding the lhs here in the vector, we ensure that the
1108 hashcode is different, guaranteeing a different value number. */
1109 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1111 memset (&temp, 0, sizeof (temp));
1112 temp.opcode = MODIFY_EXPR;
1113 temp.type = TREE_TYPE (lhs);
1114 temp.op0 = lhs;
1115 temp.off = -1;
1116 result->safe_push (temp);
1119 /* Copy the type, opcode, function, static chain and EH region, if any. */
1120 memset (&temp, 0, sizeof (temp));
1121 temp.type = gimple_call_return_type (call);
1122 temp.opcode = CALL_EXPR;
1123 temp.op0 = gimple_call_fn (call);
1124 temp.op1 = gimple_call_chain (call);
1125 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1126 temp.op2 = size_int (lr);
1127 temp.off = -1;
1128 if (gimple_call_with_bounds_p (call))
1129 temp.with_bounds = 1;
1130 result->safe_push (temp);
1132 /* Copy the call arguments. As they can be references as well,
1133 just chain them together. */
1134 for (i = 0; i < gimple_call_num_args (call); ++i)
1136 tree callarg = gimple_call_arg (call, i);
1137 copy_reference_ops_from_ref (callarg, result);
1141 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1142 *I_P to point to the last element of the replacement. */
1143 void
1144 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1145 unsigned int *i_p)
1147 unsigned int i = *i_p;
1148 vn_reference_op_t op = &(*ops)[i];
1149 vn_reference_op_t mem_op = &(*ops)[i - 1];
1150 tree addr_base;
1151 HOST_WIDE_INT addr_offset = 0;
1153 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1154 from .foo.bar to the preceding MEM_REF offset and replace the
1155 address with &OBJ. */
1156 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1157 &addr_offset);
1158 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1159 if (addr_base != TREE_OPERAND (op->op0, 0))
1161 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1162 off += addr_offset;
1163 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1164 op->op0 = build_fold_addr_expr (addr_base);
1165 if (tree_fits_shwi_p (mem_op->op0))
1166 mem_op->off = tree_to_shwi (mem_op->op0);
1167 else
1168 mem_op->off = -1;
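/* Editorial example, not part of the original source: given
   struct S { int a; int b; } s;  with b at byte offset 4, the operand
   pair { MEM_REF op0 = 4, ADDR_EXPR &s.b } is rewritten above into
   { MEM_REF op0 = 8, ADDR_EXPR &s }, so that differently spelled but
   equal addresses end up with identical operands and hash values.  */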
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1174 static void
1175 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1176 unsigned int *i_p)
1178 unsigned int i = *i_p;
1179 vn_reference_op_t op = &(*ops)[i];
1180 vn_reference_op_t mem_op = &(*ops)[i - 1];
1181 gimple def_stmt;
1182 enum tree_code code;
1183 offset_int off;
1185 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1186 if (!is_gimple_assign (def_stmt))
1187 return;
1189 code = gimple_assign_rhs_code (def_stmt);
1190 if (code != ADDR_EXPR
1191 && code != POINTER_PLUS_EXPR)
1192 return;
1194 off = offset_int::from (mem_op->op0, SIGNED);
1196 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1197 from .foo.bar to the preceding MEM_REF offset and replace the
1198 address with &OBJ. */
1199 if (code == ADDR_EXPR)
1201 tree addr, addr_base;
1202 HOST_WIDE_INT addr_offset;
1204 addr = gimple_assign_rhs1 (def_stmt);
1205 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1206 &addr_offset);
1207 if (!addr_base
1208 || TREE_CODE (addr_base) != MEM_REF)
1209 return;
1211 off += addr_offset;
1212 off += mem_ref_offset (addr_base);
1213 op->op0 = TREE_OPERAND (addr_base, 0);
1215 else
1217 tree ptr, ptroff;
1218 ptr = gimple_assign_rhs1 (def_stmt);
1219 ptroff = gimple_assign_rhs2 (def_stmt);
1220 if (TREE_CODE (ptr) != SSA_NAME
1221 || TREE_CODE (ptroff) != INTEGER_CST)
1222 return;
1224 off += wi::to_offset (ptroff);
1225 op->op0 = ptr;
1228 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1229 if (tree_fits_shwi_p (mem_op->op0))
1230 mem_op->off = tree_to_shwi (mem_op->op0);
1231 else
1232 mem_op->off = -1;
1233 if (TREE_CODE (op->op0) == SSA_NAME)
1234 op->op0 = SSA_VAL (op->op0);
1235 if (TREE_CODE (op->op0) != SSA_NAME)
1236 op->opcode = TREE_CODE (op->op0);
1238 /* And recurse. */
1239 if (TREE_CODE (op->op0) == SSA_NAME)
1240 vn_reference_maybe_forwprop_address (ops, i_p);
1241 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1242 vn_reference_fold_indirect (ops, i_p);
1245 /* Optimize the reference REF to a constant if possible or return
1246 NULL_TREE if not. */
1248 tree
1249 fully_constant_vn_reference_p (vn_reference_t ref)
1251 vec<vn_reference_op_s> operands = ref->operands;
1252 vn_reference_op_t op;
1254 /* Try to simplify the translated expression if it is
1255 a call to a builtin function with at most two arguments. */
1256 op = &operands[0];
1257 if (op->opcode == CALL_EXPR
1258 && TREE_CODE (op->op0) == ADDR_EXPR
1259 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1260 && DECL_BUILT_IN_CLASS (TREE_OPERAND (op->op0, 0)) == BUILT_IN_NORMAL
1261 && operands.length () >= 2
1262 && operands.length () <= 3)
1264 vn_reference_op_t arg0, arg1 = NULL;
1265 bool anyconst = false;
1266 arg0 = &operands[1];
1267 if (operands.length () > 2)
1268 arg1 = &operands[2];
1269 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1270 || (arg0->opcode == ADDR_EXPR
1271 && is_gimple_min_invariant (arg0->op0)))
1272 anyconst = true;
1273 if (arg1
1274 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1275 || (arg1->opcode == ADDR_EXPR
1276 && is_gimple_min_invariant (arg1->op0))))
1277 anyconst = true;
1278 if (anyconst)
1280 enum built_in_function fcode
1281 = DECL_FUNCTION_CODE (TREE_OPERAND (op->op0, 0));
1282 tree folded;
1283 if (arg1)
1284 folded = gimple_simplify (fcode, op->type, arg0->op0, arg1->op0,
1285 NULL, vn_valueize);
1286 else
1287 folded = gimple_simplify (fcode, op->type, arg0->op0,
1288 NULL, vn_valueize);
1289 if (folded
1290 && is_gimple_min_invariant (folded))
1291 return folded;
1295 /* Simplify reads from constants or constant initializers. */
1296 else if (BITS_PER_UNIT == 8
1297 && is_gimple_reg_type (ref->type)
1298 && (!INTEGRAL_TYPE_P (ref->type)
1299 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1301 HOST_WIDE_INT off = 0;
1302 HOST_WIDE_INT size;
1303 if (INTEGRAL_TYPE_P (ref->type))
1304 size = TYPE_PRECISION (ref->type);
1305 else
1306 size = tree_to_shwi (TYPE_SIZE (ref->type));
1307 if (size % BITS_PER_UNIT != 0
1308 || size > MAX_BITSIZE_MODE_ANY_MODE)
1309 return NULL_TREE;
1310 size /= BITS_PER_UNIT;
1311 unsigned i;
1312 for (i = 0; i < operands.length (); ++i)
1314 if (operands[i].off == -1)
1315 return NULL_TREE;
1316 off += operands[i].off;
1317 if (operands[i].opcode == MEM_REF)
1319 ++i;
1320 break;
1323 vn_reference_op_t base = &operands[--i];
1324 tree ctor = error_mark_node;
1325 tree decl = NULL_TREE;
1326 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1327 ctor = base->op0;
1328 else if (base->opcode == MEM_REF
1329 && base[1].opcode == ADDR_EXPR
1330 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1331 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1333 decl = TREE_OPERAND (base[1].op0, 0);
1334 ctor = ctor_for_folding (decl);
1336 if (ctor == NULL_TREE)
1337 return build_zero_cst (ref->type);
1338 else if (ctor != error_mark_node)
1340 if (decl)
1342 tree res = fold_ctor_reference (ref->type, ctor,
1343 off * BITS_PER_UNIT,
1344 size * BITS_PER_UNIT, decl);
1345 if (res)
1347 STRIP_USELESS_TYPE_CONVERSION (res);
1348 if (is_gimple_min_invariant (res))
1349 return res;
1352 else
1354 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1355 if (native_encode_expr (ctor, buf, size, off) > 0)
1356 return native_interpret_expr (ref->type, buf, size);
1361 return NULL_TREE;
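/* Editorial example, not part of the original source: assuming
   static const int tab[4] = { 1, 2, 3, 5 };  a load of tab[2]
   decomposes into operands whose offsets sum to 8 bytes in front of an
   innermost { MEM_REF, ADDR_EXPR &tab } pair.  The constant-initializer
   branch above then finds decl == tab, ctor_for_folding returns its
   CONSTRUCTOR, and fold_ctor_reference extracts the constant 3 at bit
   offset 64 (on a 32-bit-int, 8-bit-unit target).  */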
1364 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1365 structures into their value numbers. This is done in-place, and
1366 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1367 whether any operands were valueized. */
1369 static vec<vn_reference_op_s>
1370 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1372 vn_reference_op_t vro;
1373 unsigned int i;
1375 *valueized_anything = false;
1377 FOR_EACH_VEC_ELT (orig, i, vro)
1379 if (vro->opcode == SSA_NAME
1380 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1382 tree tem = SSA_VAL (vro->op0);
1383 if (tem != vro->op0)
1385 *valueized_anything = true;
1386 vro->op0 = tem;
1388 /* If it transforms from an SSA_NAME to a constant, update
1389 the opcode. */
1390 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1391 vro->opcode = TREE_CODE (vro->op0);
1393 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1395 tree tem = SSA_VAL (vro->op1);
1396 if (tem != vro->op1)
1398 *valueized_anything = true;
1399 vro->op1 = tem;
1402 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1404 tree tem = SSA_VAL (vro->op2);
1405 if (tem != vro->op2)
1407 *valueized_anything = true;
1408 vro->op2 = tem;
1411 /* If it transforms from an SSA_NAME to an address, fold with
1412 a preceding indirect reference. */
1413 if (i > 0
1414 && vro->op0
1415 && TREE_CODE (vro->op0) == ADDR_EXPR
1416 && orig[i - 1].opcode == MEM_REF)
1417 vn_reference_fold_indirect (&orig, &i);
1418 else if (i > 0
1419 && vro->opcode == SSA_NAME
1420 && orig[i - 1].opcode == MEM_REF)
1421 vn_reference_maybe_forwprop_address (&orig, &i);
1422 /* If it transforms a non-constant ARRAY_REF into a constant
1423 one, adjust the constant offset. */
1424 else if (vro->opcode == ARRAY_REF
1425 && vro->off == -1
1426 && TREE_CODE (vro->op0) == INTEGER_CST
1427 && TREE_CODE (vro->op1) == INTEGER_CST
1428 && TREE_CODE (vro->op2) == INTEGER_CST)
1430 offset_int off = ((wi::to_offset (vro->op0)
1431 - wi::to_offset (vro->op1))
1432 * wi::to_offset (vro->op2));
1433 if (wi::fits_shwi_p (off))
1434 vro->off = off.to_shwi ();
1438 return orig;
1441 static vec<vn_reference_op_s>
1442 valueize_refs (vec<vn_reference_op_s> orig)
1444 bool tem;
1445 return valueize_refs_1 (orig, &tem);
1448 static vec<vn_reference_op_s> shared_lookup_references;
1450 /* Create a vector of vn_reference_op_s structures from REF, a
1451 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1452 this function. *VALUEIZED_ANYTHING will specify whether any
1453 operands were valueized. */
1455 static vec<vn_reference_op_s>
1456 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1458 if (!ref)
1459 return vNULL;
1460 shared_lookup_references.truncate (0);
1461 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1462 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1463 valueized_anything);
1464 return shared_lookup_references;
1467 /* Create a vector of vn_reference_op_s structures from CALL, a
1468 call statement. The vector is shared among all callers of
1469 this function. */
1471 static vec<vn_reference_op_s>
1472 valueize_shared_reference_ops_from_call (gcall *call)
1474 if (!call)
1475 return vNULL;
1476 shared_lookup_references.truncate (0);
1477 copy_reference_ops_from_call (call, &shared_lookup_references);
1478 shared_lookup_references = valueize_refs (shared_lookup_references);
1479 return shared_lookup_references;
1482 /* Lookup a SCCVN reference operation VR in the current hash table.
1483 Returns the resulting value number if it exists in the hash table,
1484 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1485 vn_reference_t stored in the hashtable if something is found. */
1487 static tree
1488 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1490 vn_reference_s **slot;
1491 hashval_t hash;
1493 hash = vr->hashcode;
1494 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1495 if (!slot && current_info == optimistic_info)
1496 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1497 if (slot)
1499 if (vnresult)
1500 *vnresult = (vn_reference_t)*slot;
1501 return ((vn_reference_t)*slot)->result;
1504 return NULL_TREE;
1507 static tree *last_vuse_ptr;
1508 static vn_lookup_kind vn_walk_kind;
1509 static vn_lookup_kind default_vn_walk_kind;
1511 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1512 with the current VUSE and performs the expression lookup. */
1514 static void *
1515 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1516 unsigned int cnt, void *vr_)
1518 vn_reference_t vr = (vn_reference_t)vr_;
1519 vn_reference_s **slot;
1520 hashval_t hash;
1522 /* This bounds the stmt walks we perform on reference lookups
1523 to O(1) instead of O(N) where N is the number of dominating
1524 stores. */
1525 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1526 return (void *)-1;
1528 if (last_vuse_ptr)
1529 *last_vuse_ptr = vuse;
1531 /* Fixup vuse and hash. */
1532 if (vr->vuse)
1533 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1534 vr->vuse = vuse_ssa_val (vuse);
1535 if (vr->vuse)
1536 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1538 hash = vr->hashcode;
1539 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1540 if (!slot && current_info == optimistic_info)
1541 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1542 if (slot)
1543 return *slot;
1545 return NULL;
1548 /* Lookup an existing or insert a new vn_reference entry into the
1549 value table for the VUSE, SET, TYPE, OPERANDS reference, which
1550 has the value VALUE, either a constant or an SSA name. */
1552 static vn_reference_t
1553 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1554 alias_set_type set,
1555 tree type,
1556 vec<vn_reference_op_s,
1557 va_heap> operands,
1558 tree value)
1560 vn_reference_s vr1;
1561 vn_reference_t result;
1562 unsigned value_id;
1563 vr1.vuse = vuse;
1564 vr1.operands = operands;
1565 vr1.type = type;
1566 vr1.set = set;
1567 vr1.hashcode = vn_reference_compute_hash (&vr1);
1568 if (vn_reference_lookup_1 (&vr1, &result))
1569 return result;
1570 if (TREE_CODE (value) == SSA_NAME)
1571 value_id = VN_INFO (value)->value_id;
1572 else
1573 value_id = get_or_alloc_constant_value_id (value);
1574 return vn_reference_insert_pieces (vuse, set, type,
1575 operands.copy (), value, value_id);
1578 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1579 from the statement defining VUSE and if not successful tries to
1580 translate *REFP and VR_ through an aggregate copy at the definition
1581 of VUSE. */
1583 static void *
1584 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1585 bool disambiguate_only)
1587 vn_reference_t vr = (vn_reference_t)vr_;
1588 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1589 tree base;
1590 HOST_WIDE_INT offset, maxsize;
1591 static vec<vn_reference_op_s>
1592 lhs_ops = vNULL;
1593 ao_ref lhs_ref;
1594 bool lhs_ref_ok = false;
1596 /* First try to disambiguate after value-replacing in the definition's LHS. */
1597 if (is_gimple_assign (def_stmt))
1599 vec<vn_reference_op_s> tem;
1600 tree lhs = gimple_assign_lhs (def_stmt);
1601 bool valueized_anything = false;
1602 /* Avoid re-allocation overhead. */
1603 lhs_ops.truncate (0);
1604 copy_reference_ops_from_ref (lhs, &lhs_ops);
1605 tem = lhs_ops;
1606 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1607 gcc_assert (lhs_ops == tem);
1608 if (valueized_anything)
1610 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1611 get_alias_set (lhs),
1612 TREE_TYPE (lhs), lhs_ops);
1613 if (lhs_ref_ok
1614 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1615 return NULL;
1617 else
1619 ao_ref_init (&lhs_ref, lhs);
1620 lhs_ref_ok = true;
1623 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1624 && gimple_call_num_args (def_stmt) <= 4)
1626 /* For builtin calls, valueize the arguments and call the
1627 alias oracle again. Valueization may improve points-to
1628 info of pointers and constify size and position arguments.
1629 Originally this was motivated by PR61034 which has
1630 conditional calls to free falsely clobbering ref because
1631 of imprecise points-to info of the argument. */
1632 tree oldargs[4];
1633 bool valueized_anything = false;
1634 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1636 oldargs[i] = gimple_call_arg (def_stmt, i);
1637 if (TREE_CODE (oldargs[i]) == SSA_NAME
1638 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1640 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1641 valueized_anything = true;
1644 if (valueized_anything)
1646 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1647 ref);
1648 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1649 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1650 if (!res)
1651 return NULL;
1655 if (disambiguate_only)
1656 return (void *)-1;
1658 base = ao_ref_base (ref);
1659 offset = ref->offset;
1660 maxsize = ref->max_size;
1662 /* If we cannot constrain the size of the reference we cannot
1663 test if anything kills it. */
1664 if (maxsize == -1)
1665 return (void *)-1;
1667 /* We can't deduce anything useful from clobbers. */
1668 if (gimple_clobber_p (def_stmt))
1669 return (void *)-1;
1671 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1672 from that definition.
1673 1) Memset. */
1674 if (is_gimple_reg_type (vr->type)
1675 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1676 && integer_zerop (gimple_call_arg (def_stmt, 1))
1677 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1678 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1680 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1681 tree base2;
1682 HOST_WIDE_INT offset2, size2, maxsize2;
1683 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1684 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1685 if ((unsigned HOST_WIDE_INT)size2 / 8
1686 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1687 && maxsize2 != -1
1688 && operand_equal_p (base, base2, 0)
1689 && offset2 <= offset
1690 && offset2 + size2 >= offset + maxsize)
1692 tree val = build_zero_cst (vr->type);
1693 return vn_reference_lookup_or_insert_for_pieces
1694 (vuse, vr->set, vr->type, vr->operands, val);
1698 /* 2) Assignment from an empty CONSTRUCTOR. */
1699 else if (is_gimple_reg_type (vr->type)
1700 && gimple_assign_single_p (def_stmt)
1701 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1702 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1704 tree base2;
1705 HOST_WIDE_INT offset2, size2, maxsize2;
1706 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1707 &offset2, &size2, &maxsize2);
1708 if (maxsize2 != -1
1709 && operand_equal_p (base, base2, 0)
1710 && offset2 <= offset
1711 && offset2 + size2 >= offset + maxsize)
1713 tree val = build_zero_cst (vr->type);
1714 return vn_reference_lookup_or_insert_for_pieces
1715 (vuse, vr->set, vr->type, vr->operands, val);
1719 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1720 routines to extract the assigned bits. */
1721 else if (vn_walk_kind == VN_WALKREWRITE
1722 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1723 && ref->size == maxsize
1724 && maxsize % BITS_PER_UNIT == 0
1725 && offset % BITS_PER_UNIT == 0
1726 && is_gimple_reg_type (vr->type)
1727 && gimple_assign_single_p (def_stmt)
1728 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1730 tree base2;
1731 HOST_WIDE_INT offset2, size2, maxsize2;
1732 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1733 &offset2, &size2, &maxsize2);
1734 if (maxsize2 != -1
1735 && maxsize2 == size2
1736 && size2 % BITS_PER_UNIT == 0
1737 && offset2 % BITS_PER_UNIT == 0
1738 && operand_equal_p (base, base2, 0)
1739 && offset2 <= offset
1740 && offset2 + size2 >= offset + maxsize)
1742 /* We support up to 512-bit values (for V8DFmode). */
1743 unsigned char buffer[64];
1744 int len;
1746 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1747 buffer, sizeof (buffer));
1748 if (len > 0)
1750 tree val = native_interpret_expr (vr->type,
1751 buffer
1752 + ((offset - offset2)
1753 / BITS_PER_UNIT),
1754 ref->size / BITS_PER_UNIT);
1755 if (val)
1756 return vn_reference_lookup_or_insert_for_pieces
1757 (vuse, vr->set, vr->type, vr->operands, val);
1762 /* 4) Assignment from an SSA name which definition we may be able
1763 to access pieces from. */
1764 else if (ref->size == maxsize
1765 && is_gimple_reg_type (vr->type)
1766 && gimple_assign_single_p (def_stmt)
1767 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1769 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1770 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1771 if (is_gimple_assign (def_stmt2)
1772 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1773 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1774 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1776 tree base2;
1777 HOST_WIDE_INT offset2, size2, maxsize2, off;
1778 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1779 &offset2, &size2, &maxsize2);
1780 off = offset - offset2;
1781 if (maxsize2 != -1
1782 && maxsize2 == size2
1783 && operand_equal_p (base, base2, 0)
1784 && offset2 <= offset
1785 && offset2 + size2 >= offset + maxsize)
1787 tree val = NULL_TREE;
1788 HOST_WIDE_INT elsz
1789 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1790 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1792 if (off == 0)
1793 val = gimple_assign_rhs1 (def_stmt2);
1794 else if (off == elsz)
1795 val = gimple_assign_rhs2 (def_stmt2);
1797 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1798 && off % elsz == 0)
1800 tree ctor = gimple_assign_rhs1 (def_stmt2);
1801 unsigned i = off / elsz;
1802 if (i < CONSTRUCTOR_NELTS (ctor))
1804 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1805 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1807 if (TREE_CODE (TREE_TYPE (elt->value))
1808 != VECTOR_TYPE)
1809 val = elt->value;
1813 if (val)
1814 return vn_reference_lookup_or_insert_for_pieces
1815 (vuse, vr->set, vr->type, vr->operands, val);
1820 /* 5) For aggregate copies translate the reference through them if
1821 the copy kills ref. */
1822 else if (vn_walk_kind == VN_WALKREWRITE
1823 && gimple_assign_single_p (def_stmt)
1824 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1825 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1826 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1828 tree base2;
1829 HOST_WIDE_INT offset2, size2, maxsize2;
1830 int i, j;
1831 auto_vec<vn_reference_op_s> rhs;
1832 vn_reference_op_t vro;
1833 ao_ref r;
1835 if (!lhs_ref_ok)
1836 return (void *)-1;
1838 /* See if the assignment kills REF. */
1839 base2 = ao_ref_base (&lhs_ref);
1840 offset2 = lhs_ref.offset;
1841 size2 = lhs_ref.size;
1842 maxsize2 = lhs_ref.max_size;
1843 if (maxsize2 == -1
1844 || (base != base2 && !operand_equal_p (base, base2, 0))
1845 || offset2 > offset
1846 || offset2 + size2 < offset + maxsize)
1847 return (void *)-1;
1849 /* Find the common base of ref and the lhs. lhs_ops already
1850 contains valueized operands for the lhs. */
1851 i = vr->operands.length () - 1;
1852 j = lhs_ops.length () - 1;
1853 while (j >= 0 && i >= 0
1854 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1856 i--;
1857 j--;
1860 /* ??? The innermost op should always be a MEM_REF and we already
1861 checked that the assignment to the lhs kills vr. Thus for
1862 aggregate copies using char[] types the vn_reference_op_eq
1863 may fail when comparing types for compatibility. But we really
1864 don't care here - further lookups with the rewritten operands
1865 will simply fail if we messed up types too badly. */
1866 HOST_WIDE_INT extra_off = 0;
1867 if (j == 0 && i >= 0
1868 && lhs_ops[0].opcode == MEM_REF
1869 && lhs_ops[0].off != -1)
1871 if (lhs_ops[0].off == vr->operands[i].off)
1872 i--, j--;
1873 else if (vr->operands[i].opcode == MEM_REF
1874 && vr->operands[i].off != -1)
1876 extra_off = vr->operands[i].off - lhs_ops[0].off;
1877 i--, j--;
1881 /* i now points to the first additional op.
1882 ??? LHS may not be completely contained in VR, one or more
1883 VIEW_CONVERT_EXPRs could be in its way. We could at least
1884 try handling outermost VIEW_CONVERT_EXPRs. */
1885 if (j != -1)
1886 return (void *)-1;
1888 /* Now re-write REF to be based on the rhs of the assignment. */
1889 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1891 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1892 if (extra_off != 0)
1894 if (rhs.length () < 2
1895 || rhs[0].opcode != MEM_REF
1896 || rhs[0].off == -1)
1897 return (void *)-1;
1898 rhs[0].off += extra_off;
1899 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1900 build_int_cst (TREE_TYPE (rhs[0].op0),
1901 extra_off));
1904 /* We need to pre-pend vr->operands[0..i] to rhs. */
1905 vec<vn_reference_op_s> old = vr->operands;
1906 if (i + 1 + rhs.length () > vr->operands.length ())
1908 vr->operands.safe_grow (i + 1 + rhs.length ());
1909 if (old == shared_lookup_references)
1910 shared_lookup_references = vr->operands;
1912 else
1913 vr->operands.truncate (i + 1 + rhs.length ());
1914 FOR_EACH_VEC_ELT (rhs, j, vro)
1915 vr->operands[i + 1 + j] = *vro;
1916 vr->operands = valueize_refs (vr->operands);
1917 if (old == shared_lookup_references)
1918 shared_lookup_references = vr->operands;
1919 vr->hashcode = vn_reference_compute_hash (vr);
1921 /* Try folding the new reference to a constant. */
1922 tree val = fully_constant_vn_reference_p (vr);
1923 if (val)
1924 return vn_reference_lookup_or_insert_for_pieces
1925 (vuse, vr->set, vr->type, vr->operands, val);
1927 /* Adjust *ref from the new operands. */
1928 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1929 return (void *)-1;
1930 /* This can happen with bitfields. */
1931 if (ref->size != r.size)
1932 return (void *)-1;
1933 *ref = r;
1935 /* Do not update last seen VUSE after translating. */
1936 last_vuse_ptr = NULL;
1938 /* Keep looking for the adjusted *REF / VR pair. */
1939 return NULL;
1942 /* 6) For memcpy copies translate the reference through them if
1943 the copy kills ref. */
1944 else if (vn_walk_kind == VN_WALKREWRITE
1945 && is_gimple_reg_type (vr->type)
1946 /* ??? Handle BCOPY as well. */
1947 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1948 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1949 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1950 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1951 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1952 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1953 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1954 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1956 tree lhs, rhs;
1957 ao_ref r;
1958 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1959 vn_reference_op_s op;
1960 HOST_WIDE_INT at;
1963 /* Only handle non-variable, addressable refs. */
1964 if (ref->size != maxsize
1965 || offset % BITS_PER_UNIT != 0
1966 || ref->size % BITS_PER_UNIT != 0)
1967 return (void *)-1;
1969 /* Extract a pointer base and an offset for the destination. */
1970 lhs = gimple_call_arg (def_stmt, 0);
1971 lhs_offset = 0;
1972 if (TREE_CODE (lhs) == SSA_NAME)
1973 lhs = SSA_VAL (lhs);
1974 if (TREE_CODE (lhs) == ADDR_EXPR)
1976 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1977 &lhs_offset);
1978 if (!tem)
1979 return (void *)-1;
1980 if (TREE_CODE (tem) == MEM_REF
1981 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1983 lhs = TREE_OPERAND (tem, 0);
1984 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1986 else if (DECL_P (tem))
1987 lhs = build_fold_addr_expr (tem);
1988 else
1989 return (void *)-1;
1991 if (TREE_CODE (lhs) != SSA_NAME
1992 && TREE_CODE (lhs) != ADDR_EXPR)
1993 return (void *)-1;
1995 /* Extract a pointer base and an offset for the source. */
1996 rhs = gimple_call_arg (def_stmt, 1);
1997 rhs_offset = 0;
1998 if (TREE_CODE (rhs) == SSA_NAME)
1999 rhs = SSA_VAL (rhs);
2000 if (TREE_CODE (rhs) == ADDR_EXPR)
2002 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2003 &rhs_offset);
2004 if (!tem)
2005 return (void *)-1;
2006 if (TREE_CODE (tem) == MEM_REF
2007 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2009 rhs = TREE_OPERAND (tem, 0);
2010 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2012 else if (DECL_P (tem))
2013 rhs = build_fold_addr_expr (tem);
2014 else
2015 return (void *)-1;
2017 if (TREE_CODE (rhs) != SSA_NAME
2018 && TREE_CODE (rhs) != ADDR_EXPR)
2019 return (void *)-1;
2021 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2023 /* The bases of the destination and the references have to agree. */
2024 if ((TREE_CODE (base) != MEM_REF
2025 && !DECL_P (base))
2026 || (TREE_CODE (base) == MEM_REF
2027 && (TREE_OPERAND (base, 0) != lhs
2028 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2029 || (DECL_P (base)
2030 && (TREE_CODE (lhs) != ADDR_EXPR
2031 || TREE_OPERAND (lhs, 0) != base)))
2032 return (void *)-1;
2034 /* And the access has to be contained within the memcpy destination. */
2035 at = offset / BITS_PER_UNIT;
2036 if (TREE_CODE (base) == MEM_REF)
2037 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2038 if (lhs_offset > at
2039 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2040 return (void *)-1;
2042 /* Make room for 2 operands in the new reference. */
2043 if (vr->operands.length () < 2)
2045 vec<vn_reference_op_s> old = vr->operands;
2046 vr->operands.safe_grow_cleared (2);
2047 if (old == shared_lookup_references
2048 && vr->operands != old)
2049 shared_lookup_references = vr->operands;
2051 else
2052 vr->operands.truncate (2);
2054 /* The looked-through reference is a simple MEM_REF. */
2055 memset (&op, 0, sizeof (op));
2056 op.type = vr->type;
2057 op.opcode = MEM_REF;
2058 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2059 op.off = at - lhs_offset + rhs_offset;
2060 vr->operands[0] = op;
2061 op.type = TREE_TYPE (rhs);
2062 op.opcode = TREE_CODE (rhs);
2063 op.op0 = rhs;
2064 op.off = -1;
2065 vr->operands[1] = op;
2066 vr->hashcode = vn_reference_compute_hash (vr);
2068 /* Adjust *ref from the new operands. */
2069 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2070 return (void *)-1;
2071 /* This can happen with bitfields. */
2072 if (ref->size != r.size)
2073 return (void *)-1;
2074 *ref = r;
2076 /* Do not update last seen VUSE after translating. */
2077 last_vuse_ptr = NULL;
2079 /* Keep looking for the adjusted *REF / VR pair. */
2080 return NULL;
2083 /* Bail out and stop walking. */
2084 return (void *)-1;
2087 /* Lookup a reference operation by its parts in the current hash table.
2088 Returns the resulting value number if it exists in the hash table,
2089 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2090 vn_reference_t stored in the hashtable if something is found. */
2092 tree
2093 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2094 vec<vn_reference_op_s> operands,
2095 vn_reference_t *vnresult, vn_lookup_kind kind)
2097 struct vn_reference_s vr1;
2098 vn_reference_t tmp;
2099 tree cst;
2101 if (!vnresult)
2102 vnresult = &tmp;
2103 *vnresult = NULL;
2105 vr1.vuse = vuse_ssa_val (vuse);
2106 shared_lookup_references.truncate (0);
2107 shared_lookup_references.safe_grow (operands.length ());
2108 memcpy (shared_lookup_references.address (),
2109 operands.address (),
2110 sizeof (vn_reference_op_s)
2111 * operands.length ());
2112 vr1.operands = operands = shared_lookup_references
2113 = valueize_refs (shared_lookup_references);
2114 vr1.type = type;
2115 vr1.set = set;
2116 vr1.hashcode = vn_reference_compute_hash (&vr1);
2117 if ((cst = fully_constant_vn_reference_p (&vr1)))
2118 return cst;
2120 vn_reference_lookup_1 (&vr1, vnresult);
2121 if (!*vnresult
2122 && kind != VN_NOWALK
2123 && vr1.vuse)
2125 ao_ref r;
2126 vn_walk_kind = kind;
2127 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2128 *vnresult =
2129 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2130 vn_reference_lookup_2,
2131 vn_reference_lookup_3,
2132 vuse_ssa_val, &vr1);
2133 gcc_checking_assert (vr1.operands == shared_lookup_references);
2136 if (*vnresult)
2137 return (*vnresult)->result;
2139 return NULL_TREE;
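/* A hedged usage sketch for the function above (not part of the original
   file): a caller that already has a decomposed reference, e.g. PRE,
   could do roughly

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
					    &res, VN_WALK);
     if (val)
       ... reuse VAL as the value of the access ...

   where VUSE, SET, TYPE and OPERANDS describe the access being looked up;
   the definition above is the authoritative interface.  */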
2142 /* Lookup OP in the current hash table, and return the resulting value
2143 number if it exists in the hash table. Return NULL_TREE if it does
2144 not exist in the hash table or if the result field of the structure
2145    was NULL.  VNRESULT will be filled in with the vn_reference_t
2146 stored in the hashtable if one exists. */
2148 tree
2149 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2150 vn_reference_t *vnresult)
2152 vec<vn_reference_op_s> operands;
2153 struct vn_reference_s vr1;
2154 tree cst;
2155   bool valueized_anything;
2157 if (vnresult)
2158 *vnresult = NULL;
2160 vr1.vuse = vuse_ssa_val (vuse);
2161 vr1.operands = operands
2162     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2163 vr1.type = TREE_TYPE (op);
2164 vr1.set = get_alias_set (op);
2165 vr1.hashcode = vn_reference_compute_hash (&vr1);
2166 if ((cst = fully_constant_vn_reference_p (&vr1)))
2167 return cst;
2169 if (kind != VN_NOWALK
2170 && vr1.vuse)
2172 vn_reference_t wvnresult;
2173 ao_ref r;
2174 /* Make sure to use a valueized reference if we valueized anything.
2175 Otherwise preserve the full reference for advanced TBAA. */
2176       if (!valueized_anything
2177 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2178 vr1.operands))
2179 ao_ref_init (&r, op);
2180 vn_walk_kind = kind;
2181 wvnresult =
2182 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2183 vn_reference_lookup_2,
2184 vn_reference_lookup_3,
2185 vuse_ssa_val, &vr1);
2186 gcc_checking_assert (vr1.operands == shared_lookup_references);
2187 if (wvnresult)
2189 if (vnresult)
2190 *vnresult = wvnresult;
2191 return wvnresult->result;
2194 return NULL_TREE;
2197 return vn_reference_lookup_1 (&vr1, vnresult);
2200 /* Lookup CALL in the current hash table and return the entry in
2201 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2203 void
2204 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2205 vn_reference_t vr)
2207 if (vnresult)
2208 *vnresult = NULL;
2210 tree vuse = gimple_vuse (call);
2212 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2213 vr->operands = valueize_shared_reference_ops_from_call (call);
2214 vr->type = gimple_expr_type (call);
2215 vr->set = 0;
2216 vr->hashcode = vn_reference_compute_hash (vr);
2217 vn_reference_lookup_1 (vr, vnresult);
2220 /* Insert OP into the current hash table with a value number of
2221 RESULT, and return the resulting reference structure we created. */
2223 static vn_reference_t
2224 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2226 vn_reference_s **slot;
2227 vn_reference_t vr1;
2228 bool tem;
2230 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2231 if (TREE_CODE (result) == SSA_NAME)
2232 vr1->value_id = VN_INFO (result)->value_id;
2233 else
2234 vr1->value_id = get_or_alloc_constant_value_id (result);
2235 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2236 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2237 vr1->type = TREE_TYPE (op);
2238 vr1->set = get_alias_set (op);
2239 vr1->hashcode = vn_reference_compute_hash (vr1);
2240 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2241 vr1->result_vdef = vdef;
2243 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2244 INSERT);
2246 /* Because we lookup stores using vuses, and value number failures
2247 using the vdefs (see visit_reference_op_store for how and why),
2248 it's possible that on failure we may try to insert an already
2249      inserted store.  This is not wrong; there is no ssa name for a
2250 store that we could use as a differentiator anyway. Thus, unlike
2251 the other lookup functions, you cannot gcc_assert (!*slot)
2252 here. */
2254 /* But free the old slot in case of a collision. */
2255 if (*slot)
2256 free_reference (*slot);
2258 *slot = vr1;
2259 return vr1;
2262 /* Insert a reference by its pieces into the current hash table with
2263 a value number of RESULT. Return the resulting reference
2264 structure we created. */
2266 vn_reference_t
2267 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2268 vec<vn_reference_op_s> operands,
2269 tree result, unsigned int value_id)
2272 vn_reference_s **slot;
2273 vn_reference_t vr1;
2275 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2276 vr1->value_id = value_id;
2277 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2278 vr1->operands = valueize_refs (operands);
2279 vr1->type = type;
2280 vr1->set = set;
2281 vr1->hashcode = vn_reference_compute_hash (vr1);
2282 if (result && TREE_CODE (result) == SSA_NAME)
2283 result = SSA_VAL (result);
2284 vr1->result = result;
2286 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2287 INSERT);
2289 /* At this point we should have all the things inserted that we have
2290 seen before, and we should never try inserting something that
2291 already exists. */
2292 gcc_assert (!*slot);
2293 if (*slot)
2294 free_reference (*slot);
2296 *slot = vr1;
2297 return vr1;
2300 /* Compute and return the hash value for nary operation VNO1.  */
2302 static hashval_t
2303 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2305 inchash::hash hstate;
2306 unsigned i;
2308 for (i = 0; i < vno1->length; ++i)
2309 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2310 vno1->op[i] = SSA_VAL (vno1->op[i]);
2312 if (vno1->length == 2
2313 && commutative_tree_code (vno1->opcode)
2314 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2316 tree temp = vno1->op[0];
2317 vno1->op[0] = vno1->op[1];
2318 vno1->op[1] = temp;
2321 hstate.add_int (vno1->opcode);
2322 for (i = 0; i < vno1->length; ++i)
2323 inchash::add_expr (vno1->op[i], hstate);
2325 return hstate.end ();
2328 /* Compare nary operations VNO1 and VNO2 and return true if they are
2329 equivalent. */
2331 bool
2332 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2334 unsigned i;
2336 if (vno1->hashcode != vno2->hashcode)
2337 return false;
2339 if (vno1->length != vno2->length)
2340 return false;
2342 if (vno1->opcode != vno2->opcode
2343 || !types_compatible_p (vno1->type, vno2->type))
2344 return false;
2346 for (i = 0; i < vno1->length; ++i)
2347 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2348 return false;
2350 return true;
2353 /* Initialize VNO from the pieces provided. */
2355 static void
2356 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2357 enum tree_code code, tree type, tree *ops)
2359 vno->opcode = code;
2360 vno->length = length;
2361 vno->type = type;
2362 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2365 /* Initialize VNO from OP. */
2367 static void
2368 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2370 unsigned i;
2372 vno->opcode = TREE_CODE (op);
2373 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2374 vno->type = TREE_TYPE (op);
2375 for (i = 0; i < vno->length; ++i)
2376 vno->op[i] = TREE_OPERAND (op, i);
2379 /* Return the number of operands for a vn_nary ops structure from STMT. */
2381 static unsigned int
2382 vn_nary_length_from_stmt (gimple stmt)
2384 switch (gimple_assign_rhs_code (stmt))
2386 case REALPART_EXPR:
2387 case IMAGPART_EXPR:
2388 case VIEW_CONVERT_EXPR:
2389 return 1;
2391 case BIT_FIELD_REF:
2392 return 3;
2394 case CONSTRUCTOR:
2395 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2397 default:
2398 return gimple_num_ops (stmt) - 1;
2402 /* Initialize VNO from STMT. */
2404 static void
2405 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2407 unsigned i;
2409 vno->opcode = gimple_assign_rhs_code (stmt);
2410 vno->type = gimple_expr_type (stmt);
2411 switch (vno->opcode)
2413 case REALPART_EXPR:
2414 case IMAGPART_EXPR:
2415 case VIEW_CONVERT_EXPR:
2416 vno->length = 1;
2417 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2418 break;
2420 case BIT_FIELD_REF:
2421 vno->length = 3;
2422 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2423 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2424 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2425 break;
2427 case CONSTRUCTOR:
2428 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2429 for (i = 0; i < vno->length; ++i)
2430 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2431 break;
2433 default:
2434 gcc_checking_assert (!gimple_assign_single_p (stmt));
2435 vno->length = gimple_num_ops (stmt) - 1;
2436 for (i = 0; i < vno->length; ++i)
2437 vno->op[i] = gimple_op (stmt, i + 1);
2441 /* Compute the hashcode for VNO and look for it in the hash table;
2442 return the resulting value number if it exists in the hash table.
2443 Return NULL_TREE if it does not exist in the hash table or if the
2444 result field of the operation is NULL. VNRESULT will contain the
2445 vn_nary_op_t from the hashtable if it exists. */
2447 static tree
2448 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2450 vn_nary_op_s **slot;
2452 if (vnresult)
2453 *vnresult = NULL;
2455 vno->hashcode = vn_nary_op_compute_hash (vno);
2456 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2457 NO_INSERT);
2458 if (!slot && current_info == optimistic_info)
2459 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2460 NO_INSERT);
2461 if (!slot)
2462 return NULL_TREE;
2463 if (vnresult)
2464 *vnresult = *slot;
2465 return (*slot)->result;
2468 /* Lookup an n-ary operation by its pieces and return the resulting value
2469 number if it exists in the hash table. Return NULL_TREE if it does
2470 not exist in the hash table or if the result field of the operation
2471 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2472 if it exists. */
2474 tree
2475 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2476 tree type, tree *ops, vn_nary_op_t *vnresult)
2478 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2479 sizeof_vn_nary_op (length));
2480 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2481 return vn_nary_op_lookup_1 (vno1, vnresult);
2484 /* Lookup OP in the current hash table, and return the resulting value
2485 number if it exists in the hash table. Return NULL_TREE if it does
2486 not exist in the hash table or if the result field of the operation
2487 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2488 if it exists. */
2490 tree
2491 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2493 vn_nary_op_t vno1
2494 = XALLOCAVAR (struct vn_nary_op_s,
2495 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2496 init_vn_nary_op_from_op (vno1, op);
2497 return vn_nary_op_lookup_1 (vno1, vnresult);
2500 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2501 value number if it exists in the hash table. Return NULL_TREE if
2502 it does not exist in the hash table. VNRESULT will contain the
2503 vn_nary_op_t from the hashtable if it exists. */
2505 tree
2506 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2508 vn_nary_op_t vno1
2509 = XALLOCAVAR (struct vn_nary_op_s,
2510 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2511 init_vn_nary_op_from_stmt (vno1, stmt);
2512 return vn_nary_op_lookup_1 (vno1, vnresult);
2515 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2517 static vn_nary_op_t
2518 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2520 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2523 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2524 obstack. */
2526 static vn_nary_op_t
2527 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2529 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2530 &current_info->nary_obstack);
2532 vno1->value_id = value_id;
2533 vno1->length = length;
2534 vno1->result = result;
2536 return vno1;
2539 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2540 VNO->HASHCODE first. */
2542 static vn_nary_op_t
2543 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2544 bool compute_hash)
2546 vn_nary_op_s **slot;
2548 if (compute_hash)
2549 vno->hashcode = vn_nary_op_compute_hash (vno);
2551 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2552 gcc_assert (!*slot);
2554 *slot = vno;
2555 return vno;
2558 /* Insert an n-ary operation into the current hash table using its
2559 pieces. Return the vn_nary_op_t structure we created and put in
2560 the hashtable. */
2562 vn_nary_op_t
2563 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2564 tree type, tree *ops,
2565 tree result, unsigned int value_id)
2567 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2568 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2569 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2572 /* Insert OP into the current hash table with a value number of
2573 RESULT. Return the vn_nary_op_t structure we created and put in
2574 the hashtable. */
2576 vn_nary_op_t
2577 vn_nary_op_insert (tree op, tree result)
2579 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2580 vn_nary_op_t vno1;
2582 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2583 init_vn_nary_op_from_op (vno1, op);
2584 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2587 /* Insert the rhs of STMT into the current hash table with a value number of
2588 RESULT. */
2590 vn_nary_op_t
2591 vn_nary_op_insert_stmt (gimple stmt, tree result)
2593 vn_nary_op_t vno1
2594 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2595 result, VN_INFO (result)->value_id);
2596 init_vn_nary_op_from_stmt (vno1, stmt);
2597 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2600 /* Compute a hashcode for PHI operation VP1 and return it. */
2602 static inline hashval_t
2603 vn_phi_compute_hash (vn_phi_t vp1)
2605 inchash::hash hstate (vp1->block->index);
2606 int i;
2607 tree phi1op;
2608 tree type;
2610 /* If all PHI arguments are constants we need to distinguish
2611 the PHI node via its type. */
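  /* E.g. (illustrative): two PHIs with arguments <1, 2> whose results
     have type int and type long would otherwise hash and compare equal
     even though they compute values of different width.  */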
2612 type = vp1->type;
2613 hstate.merge_hash (vn_hash_type (type));
2615 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2617 if (phi1op == VN_TOP)
2618 continue;
2619 inchash::add_expr (phi1op, hstate);
2622 return hstate.end ();
2625 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2627 static int
2628 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2630 if (vp1->hashcode != vp2->hashcode)
2631 return false;
2633 if (vp1->block == vp2->block)
2635 int i;
2636 tree phi1op;
2638 /* If the PHI nodes do not have compatible types
2639 they are not the same. */
2640 if (!types_compatible_p (vp1->type, vp2->type))
2641 return false;
2643       /* Any phi in the same block will have its arguments in the
2644 same edge order, because of how we store phi nodes. */
2645 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2647 tree phi2op = vp2->phiargs[i];
2648 if (phi1op == VN_TOP || phi2op == VN_TOP)
2649 continue;
2650 if (!expressions_equal_p (phi1op, phi2op))
2651 return false;
2653 return true;
2655 return false;
2658 static vec<tree> shared_lookup_phiargs;
2660 /* Lookup PHI in the current hash table, and return the resulting
2661 value number if it exists in the hash table. Return NULL_TREE if
2662 it does not exist in the hash table. */
2664 static tree
2665 vn_phi_lookup (gimple phi)
2667 vn_phi_s **slot;
2668 struct vn_phi_s vp1;
2669 unsigned i;
2671 shared_lookup_phiargs.truncate (0);
2673 /* Canonicalize the SSA_NAME's to their value number. */
2674 for (i = 0; i < gimple_phi_num_args (phi); i++)
2676 tree def = PHI_ARG_DEF (phi, i);
2677 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2678 shared_lookup_phiargs.safe_push (def);
2680 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2681 vp1.phiargs = shared_lookup_phiargs;
2682 vp1.block = gimple_bb (phi);
2683 vp1.hashcode = vn_phi_compute_hash (&vp1);
2684 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2685 NO_INSERT);
2686 if (!slot && current_info == optimistic_info)
2687 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2688 NO_INSERT);
2689 if (!slot)
2690 return NULL_TREE;
2691 return (*slot)->result;
2694 /* Insert PHI into the current hash table with a value number of
2695 RESULT. */
2697 static vn_phi_t
2698 vn_phi_insert (gimple phi, tree result)
2700 vn_phi_s **slot;
2701 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2702 unsigned i;
2703 vec<tree> args = vNULL;
2705 /* Canonicalize the SSA_NAME's to their value number. */
2706 for (i = 0; i < gimple_phi_num_args (phi); i++)
2708 tree def = PHI_ARG_DEF (phi, i);
2709 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2710 args.safe_push (def);
2712 vp1->value_id = VN_INFO (result)->value_id;
2713 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2714 vp1->phiargs = args;
2715 vp1->block = gimple_bb (phi);
2716 vp1->result = result;
2717 vp1->hashcode = vn_phi_compute_hash (vp1);
2719 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2721 /* Because we iterate over phi operations more than once, it's
2722      possible the slot might already exist here, hence no assert.  */
2723 *slot = vp1;
2724 return vp1;
2728 /* Print set of components in strongly connected component SCC to OUT. */
2730 static void
2731 print_scc (FILE *out, vec<tree> scc)
2733 tree var;
2734 unsigned int i;
2736 fprintf (out, "SCC consists of:");
2737 FOR_EACH_VEC_ELT (scc, i, var)
2739 fprintf (out, " ");
2740 print_generic_expr (out, var, 0);
2742 fprintf (out, "\n");
2745 /* Set the value number of FROM to TO, return true if it has changed
2746 as a result. */
2748 static inline bool
2749 set_ssa_val_to (tree from, tree to)
2751 tree currval = SSA_VAL (from);
2752 HOST_WIDE_INT toff, coff;
2754 /* The only thing we allow as value numbers are ssa_names
2755 and invariants. So assert that here. We don't allow VN_TOP
2756 as visiting a stmt should produce a value-number other than
2757 that.
2758 ??? Still VN_TOP can happen for unreachable code, so force
2759 it to varying in that case. Not all code is prepared to
2760 get VN_TOP on valueization. */
2761 if (to == VN_TOP)
2763 if (dump_file && (dump_flags & TDF_DETAILS))
2764 fprintf (dump_file, "Forcing value number to varying on "
2765 "receiving VN_TOP\n");
2766 to = from;
2769 gcc_assert (to != NULL_TREE
2770 && ((TREE_CODE (to) == SSA_NAME
2771 && (to == from || SSA_VAL (to) == to))
2772 || is_gimple_min_invariant (to)));
2774 if (from != to)
2776 if (currval == from)
2778 if (dump_file && (dump_flags & TDF_DETAILS))
2780 fprintf (dump_file, "Not changing value number of ");
2781 print_generic_expr (dump_file, from, 0);
2782 fprintf (dump_file, " from VARYING to ");
2783 print_generic_expr (dump_file, to, 0);
2784 fprintf (dump_file, "\n");
2786 return false;
2788 else if (TREE_CODE (to) == SSA_NAME
2789 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2790 to = from;
2793 if (dump_file && (dump_flags & TDF_DETAILS))
2795 fprintf (dump_file, "Setting value number of ");
2796 print_generic_expr (dump_file, from, 0);
2797 fprintf (dump_file, " to ");
2798 print_generic_expr (dump_file, to, 0);
2801 if (currval != to
2802 && !operand_equal_p (currval, to, 0)
2803 /* ??? For addresses involving volatile objects or types operand_equal_p
2804 does not reliably detect ADDR_EXPRs as equal. We know we are only
2805 getting invariant gimple addresses here, so can use
2806 get_addr_base_and_unit_offset to do this comparison. */
2807 && !(TREE_CODE (currval) == ADDR_EXPR
2808 && TREE_CODE (to) == ADDR_EXPR
2809 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2810 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2811 && coff == toff))
2813 VN_INFO (from)->valnum = to;
2814 if (dump_file && (dump_flags & TDF_DETAILS))
2815 fprintf (dump_file, " (changed)\n");
2816 return true;
2818 if (dump_file && (dump_flags & TDF_DETAILS))
2819 fprintf (dump_file, "\n");
2820 return false;
2823 /* Mark as processed all the definitions in the defining stmt of USE, or
2824 the USE itself. */
2826 static void
2827 mark_use_processed (tree use)
2829 ssa_op_iter iter;
2830 def_operand_p defp;
2831 gimple stmt = SSA_NAME_DEF_STMT (use);
2833 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2835 VN_INFO (use)->use_processed = true;
2836 return;
2839 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2841 tree def = DEF_FROM_PTR (defp);
2843 VN_INFO (def)->use_processed = true;
2847 /* Value number all definitions in STMT to themselves.
2848 Return true if a value number changed. */
2850 static bool
2851 defs_to_varying (gimple stmt)
2853 bool changed = false;
2854 ssa_op_iter iter;
2855 def_operand_p defp;
2857 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2859 tree def = DEF_FROM_PTR (defp);
2860 changed |= set_ssa_val_to (def, def);
2862 return changed;
2865 /* Visit a copy between LHS and RHS, return true if the value number
2866 changed. */
2868 static bool
2869 visit_copy (tree lhs, tree rhs)
2871 /* And finally valueize. */
2872 rhs = SSA_VAL (rhs);
2874 return set_ssa_val_to (lhs, rhs);
2877 /* Visit a nary operator RHS, value number it, and return true if the
2878 value number of LHS has changed as a result. */
2880 static bool
2881 visit_nary_op (tree lhs, gimple stmt)
2883 bool changed = false;
2884 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2886 if (result)
2887 changed = set_ssa_val_to (lhs, result);
2888 else
2890 changed = set_ssa_val_to (lhs, lhs);
2891 vn_nary_op_insert_stmt (stmt, lhs);
2894 return changed;
2897 /* Visit a call STMT storing into LHS. Return true if the value number
2898 of the LHS has changed as a result. */
2900 static bool
2901 visit_reference_op_call (tree lhs, gcall *stmt)
2903 bool changed = false;
2904 struct vn_reference_s vr1;
2905 vn_reference_t vnresult = NULL;
2906 tree vdef = gimple_vdef (stmt);
2908 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2909 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2910 lhs = NULL_TREE;
2912 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2913 if (vnresult)
2915 if (vnresult->result_vdef && vdef)
2916 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2918 if (!vnresult->result && lhs)
2919 vnresult->result = lhs;
2921 if (vnresult->result && lhs)
2922 changed |= set_ssa_val_to (lhs, vnresult->result);
2924 else
2926 vn_reference_t vr2;
2927 vn_reference_s **slot;
2928 if (vdef)
2929 changed |= set_ssa_val_to (vdef, vdef);
2930 if (lhs)
2931 changed |= set_ssa_val_to (lhs, lhs);
2932 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2933 vr2->vuse = vr1.vuse;
2934 /* As we are not walking the virtual operand chain we know the
2935 shared_lookup_references are still original so we can re-use
2936 them here. */
2937 vr2->operands = vr1.operands.copy ();
2938 vr2->type = vr1.type;
2939 vr2->set = vr1.set;
2940 vr2->hashcode = vr1.hashcode;
2941 vr2->result = lhs;
2942 vr2->result_vdef = vdef;
2943 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
2944 INSERT);
2945 gcc_assert (!*slot);
2946 *slot = vr2;
2949 return changed;
2952 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2953 and return true if the value number of the LHS has changed as a result. */
2955 static bool
2956 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2958 bool changed = false;
2959 tree last_vuse;
2960 tree result;
2962 last_vuse = gimple_vuse (stmt);
2963 last_vuse_ptr = &last_vuse;
2964 result = vn_reference_lookup (op, gimple_vuse (stmt),
2965 default_vn_walk_kind, NULL);
2966 last_vuse_ptr = NULL;
2968 /* We handle type-punning through unions by value-numbering based
2969 on offset and size of the access. Be prepared to handle a
2970 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
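     /* Hedged example (union and names invented for illustration): with
	union { int i; float f; } u, a store through u.i followed by a load
	through u.f finds the stored value by offset and size, but with the
	wrong type; the code below wraps it in
	VIEW_CONVERT_EXPR <float> (value) to fix that up.  */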
2971 if (result
2972 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2974 /* We will be setting the value number of lhs to the value number
2975 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2976 So first simplify and lookup this expression to see if it
2977 is already available. */
2978 tree val = gimple_simplify (VIEW_CONVERT_EXPR, TREE_TYPE (op),
2979 result, NULL, vn_valueize);
2980 if (!val)
2981 val = vn_nary_op_lookup_pieces (1, VIEW_CONVERT_EXPR,
2982 TREE_TYPE (op), &result, NULL);
2983 /* If the expression is not yet available, value-number lhs to
2984 a new SSA_NAME we create. */
2985 if (!val)
2987 /* ??? Instead of recording a tree here we should use
2988 gimple_build and record a sequence in VN_INFO->expr. */
2989 val = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2990 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2991 "vntemp");
2992 /* Initialize value-number information properly. */
2993 VN_INFO_GET (result)->valnum = result;
2994 VN_INFO (result)->value_id = get_next_value_id ();
2995 VN_INFO (result)->expr = val;
2996 VN_INFO (result)->needs_insertion = true;
2997 /* As all "inserted" statements are singleton SCCs, insert
2998 to the valid table. This is strictly needed to
2999 avoid re-generating new value SSA_NAMEs for the same
3000 expression during SCC iteration over and over (the
3001 optimistic table gets cleared after each iteration).
3002 We do not need to insert into the optimistic table, as
3003 lookups there will fall back to the valid table. */
3004 if (current_info == optimistic_info)
3006 current_info = valid_info;
3007 vn_nary_op_insert (val, result);
3008 current_info = optimistic_info;
3010 else
3011 vn_nary_op_insert (val, result);
3012 if (dump_file && (dump_flags & TDF_DETAILS))
3014 fprintf (dump_file, "Inserting name ");
3015 print_generic_expr (dump_file, result, 0);
3016 fprintf (dump_file, " for expression ");
3017 print_generic_expr (dump_file, val, 0);
3018 fprintf (dump_file, "\n");
3021 else
3022 result = val;
3025 if (result)
3026 changed = set_ssa_val_to (lhs, result);
3027 else
3029 changed = set_ssa_val_to (lhs, lhs);
3030 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3033 return changed;
3037 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3038 and return true if the value number of the LHS has changed as a result. */
3040 static bool
3041 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3043 bool changed = false;
3044 vn_reference_t vnresult = NULL;
3045 tree result, assign;
3046 bool resultsame = false;
3047 tree vuse = gimple_vuse (stmt);
3048 tree vdef = gimple_vdef (stmt);
3050 if (TREE_CODE (op) == SSA_NAME)
3051 op = SSA_VAL (op);
3053   /* First we want to look up using the *vuses* from the store and see
3054      if the last store to this location with the same address
3055 had the same value.
3057 The vuses represent the memory state before the store. If the
3058 memory state, address, and value of the store is the same as the
3059 last store to this location, then this store will produce the
3060 same memory state as that store.
3062 In this case the vdef versions for this store are value numbered to those
3063 vuse versions, since they represent the same memory state after
3064 this store.
3066 Otherwise, the vdefs for the store are used when inserting into
3067 the table, since the store generates a new memory state. */
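  /* A rough example of the above (made-up SSA names):

       # .MEM_3 = VDEF <.MEM_2>
       a = x_1;
       ...
       # .MEM_5 = VDEF <.MEM_4>
       a = x_1;

     if the lookup with vuse .MEM_4 shows that the location already holds
     x_1, the second store changes nothing and .MEM_5 is value numbered
     to the value of .MEM_4 rather than to itself.  */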
3069 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3071 if (result)
3073 if (TREE_CODE (result) == SSA_NAME)
3074 result = SSA_VAL (result);
3075 resultsame = expressions_equal_p (result, op);
3078 if ((!result || !resultsame)
3079 /* Only perform the following when being called from PRE
3080 which embeds tail merging. */
3081 && default_vn_walk_kind == VN_WALK)
3083 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3084 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3085 if (vnresult)
3087 VN_INFO (vdef)->use_processed = true;
3088 return set_ssa_val_to (vdef, vnresult->result_vdef);
3092 if (!result || !resultsame)
3094 if (dump_file && (dump_flags & TDF_DETAILS))
3096 fprintf (dump_file, "No store match\n");
3097 fprintf (dump_file, "Value numbering store ");
3098 print_generic_expr (dump_file, lhs, 0);
3099 fprintf (dump_file, " to ");
3100 print_generic_expr (dump_file, op, 0);
3101 fprintf (dump_file, "\n");
3103 /* Have to set value numbers before insert, since insert is
3104 going to valueize the references in-place. */
3105 if (vdef)
3107 changed |= set_ssa_val_to (vdef, vdef);
3110 /* Do not insert structure copies into the tables. */
3111 if (is_gimple_min_invariant (op)
3112 || is_gimple_reg (op))
3113 vn_reference_insert (lhs, op, vdef, NULL);
3115 /* Only perform the following when being called from PRE
3116 which embeds tail merging. */
3117 if (default_vn_walk_kind == VN_WALK)
3119 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3120 vn_reference_insert (assign, lhs, vuse, vdef);
3123 else
3125 /* We had a match, so value number the vdef to have the value
3126 number of the vuse it came from. */
3128 if (dump_file && (dump_flags & TDF_DETAILS))
3129 	fprintf (dump_file, "Store matched earlier value, "
3130 "value numbering store vdefs to matching vuses.\n");
3132 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3135 return changed;
3138 /* Visit and value number PHI, return true if the value number
3139 changed. */
3141 static bool
3142 visit_phi (gimple phi)
3144 bool changed = false;
3145 tree result;
3146 tree sameval = VN_TOP;
3147 bool allsame = true;
3149 /* TODO: We could check for this in init_sccvn, and replace this
3150 with a gcc_assert. */
3151 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3152 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3154 /* See if all non-TOP arguments have the same value. TOP is
3155 equivalent to everything, so we can ignore it. */
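  /* E.g. (illustrative): for x_3 = PHI <y_1, z_2, y_1> where z_2 still
     has value VN_TOP (not yet visited), the remaining arguments all
     value number to y_1, so x_3 is value numbered to y_1 below.  */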
3156 edge_iterator ei;
3157 edge e;
3158 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3159 if (e->flags & EDGE_EXECUTABLE)
3161 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3163 if (TREE_CODE (def) == SSA_NAME)
3164 def = SSA_VAL (def);
3165 if (def == VN_TOP)
3166 continue;
3167 if (sameval == VN_TOP)
3169 sameval = def;
3171 else
3173 if (!expressions_equal_p (def, sameval))
3175 allsame = false;
3176 break;
3181 /* If all value numbered to the same value, the phi node has that
3182 value. */
3183 if (allsame)
3184 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3186 /* Otherwise, see if it is equivalent to a phi node in this block. */
3187 result = vn_phi_lookup (phi);
3188 if (result)
3189 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3190 else
3192 vn_phi_insert (phi, PHI_RESULT (phi));
3193 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3196 return changed;
3200 /* Try to simplify RHS using equivalences and constant folding. */
3202 static tree
3203 try_to_simplify (gassign *stmt)
3205 enum tree_code code = gimple_assign_rhs_code (stmt);
3206 tree tem;
3208   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3209      in this case; there is no point in doing extra work.  */
3210 if (code == SSA_NAME)
3211 return NULL_TREE;
3213 /* If that didn't work try combining multiple statements.
3214 ??? Handle multiple stmts being generated by storing
3215 at most one in VN_INFO->expr? But then we'd have to
3216 transparently support materializing temporary SSA names
3217 created by gimple_simplify - or we never value-number
3218 to them. */
3219 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3220 if (tem
3221 && (TREE_CODE (tem) == SSA_NAME
3222 || is_gimple_min_invariant (tem)))
3223 return tem;
3225 return NULL_TREE;
3228 /* Visit and value number USE, return true if the value number
3229 changed. */
3231 static bool
3232 visit_use (tree use)
3234 bool changed = false;
3235 gimple stmt = SSA_NAME_DEF_STMT (use);
3237 mark_use_processed (use);
3239 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3240 if (dump_file && (dump_flags & TDF_DETAILS)
3241 && !SSA_NAME_IS_DEFAULT_DEF (use))
3243 fprintf (dump_file, "Value numbering ");
3244 print_generic_expr (dump_file, use, 0);
3245 fprintf (dump_file, " stmt = ");
3246 print_gimple_stmt (dump_file, stmt, 0, 0);
3249 /* Handle uninitialized uses. */
3250 if (SSA_NAME_IS_DEFAULT_DEF (use))
3251 changed = set_ssa_val_to (use, use);
3252 else
3254 if (gimple_code (stmt) == GIMPLE_PHI)
3255 changed = visit_phi (stmt);
3256 else if (gimple_has_volatile_ops (stmt))
3257 changed = defs_to_varying (stmt);
3258 else if (is_gimple_assign (stmt))
3260 enum tree_code code = gimple_assign_rhs_code (stmt);
3261 tree lhs = gimple_assign_lhs (stmt);
3262 tree rhs1 = gimple_assign_rhs1 (stmt);
3263 tree simplified;
3265 /* Shortcut for copies. Simplifying copies is pointless,
3266 since we copy the expression and value they represent. */
3267 if (code == SSA_NAME
3268 && TREE_CODE (lhs) == SSA_NAME)
3270 changed = visit_copy (lhs, rhs1);
3271 goto done;
3273 simplified = try_to_simplify (as_a <gassign *> (stmt));
3274 if (simplified)
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3278 fprintf (dump_file, "RHS ");
3279 print_gimple_expr (dump_file, stmt, 0, 0);
3280 fprintf (dump_file, " simplified to ");
3281 print_generic_expr (dump_file, simplified, 0);
3282 fprintf (dump_file, "\n");
3285 /* Setting value numbers to constants will occasionally
3286 screw up phi congruence because constants are not
3287 uniquely associated with a single ssa name that can be
3288 looked up. */
3289 if (simplified
3290 && is_gimple_min_invariant (simplified)
3291 && TREE_CODE (lhs) == SSA_NAME)
3293 changed = set_ssa_val_to (lhs, simplified);
3294 goto done;
3296 else if (simplified
3297 && TREE_CODE (simplified) == SSA_NAME
3298 && TREE_CODE (lhs) == SSA_NAME)
3300 changed = visit_copy (lhs, simplified);
3301 goto done;
3304 if ((TREE_CODE (lhs) == SSA_NAME
3305 /* We can substitute SSA_NAMEs that are live over
3306 abnormal edges with their constant value. */
3307 && !(gimple_assign_copy_p (stmt)
3308 && is_gimple_min_invariant (rhs1))
3309 && !(simplified
3310 && is_gimple_min_invariant (simplified))
3311 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3312 /* Stores or copies from SSA_NAMEs that are live over
3313 abnormal edges are a problem. */
3314 || (code == SSA_NAME
3315 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3316 changed = defs_to_varying (stmt);
3317 else if (REFERENCE_CLASS_P (lhs)
3318 || DECL_P (lhs))
3319 changed = visit_reference_op_store (lhs, rhs1, stmt);
3320 else if (TREE_CODE (lhs) == SSA_NAME)
3322 if ((gimple_assign_copy_p (stmt)
3323 && is_gimple_min_invariant (rhs1))
3324 || (simplified
3325 && is_gimple_min_invariant (simplified)))
3327 if (simplified)
3328 changed = set_ssa_val_to (lhs, simplified);
3329 else
3330 changed = set_ssa_val_to (lhs, rhs1);
3332 else
3334 /* First try to lookup the simplified expression. */
3335 if (simplified)
3337 enum gimple_rhs_class rhs_class;
3340 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3341 if ((rhs_class == GIMPLE_UNARY_RHS
3342 || rhs_class == GIMPLE_BINARY_RHS
3343 || rhs_class == GIMPLE_TERNARY_RHS)
3344 && valid_gimple_rhs_p (simplified))
3346 tree result = vn_nary_op_lookup (simplified, NULL);
3347 if (result)
3349 changed = set_ssa_val_to (lhs, result);
3350 goto done;
3355 /* Otherwise visit the original statement. */
3356 switch (vn_get_stmt_kind (stmt))
3358 case VN_NARY:
3359 changed = visit_nary_op (lhs, stmt);
3360 break;
3361 case VN_REFERENCE:
3362 changed = visit_reference_op_load (lhs, rhs1, stmt);
3363 break;
3364 default:
3365 changed = defs_to_varying (stmt);
3366 break;
3370 else
3371 changed = defs_to_varying (stmt);
3373 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3375 tree lhs = gimple_call_lhs (stmt);
3376 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3378 /* Try constant folding based on our current lattice. */
3379 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3380 vn_valueize,
3381 vn_valueize);
3382 if (simplified)
3384 if (dump_file && (dump_flags & TDF_DETAILS))
3386 fprintf (dump_file, "call ");
3387 print_gimple_expr (dump_file, stmt, 0, 0);
3388 fprintf (dump_file, " simplified to ");
3389 print_generic_expr (dump_file, simplified, 0);
3390 fprintf (dump_file, "\n");
3393 /* Setting value numbers to constants will occasionally
3394 screw up phi congruence because constants are not
3395 uniquely associated with a single ssa name that can be
3396 looked up. */
3397 if (simplified
3398 && is_gimple_min_invariant (simplified))
3400 VN_INFO (lhs)->expr = simplified;
3401 changed = set_ssa_val_to (lhs, simplified);
3402 if (gimple_vdef (stmt))
3403 changed |= set_ssa_val_to (gimple_vdef (stmt),
3404 SSA_VAL (gimple_vuse (stmt)));
3405 goto done;
3407 else if (simplified
3408 && TREE_CODE (simplified) == SSA_NAME)
3410 changed = visit_copy (lhs, simplified);
3411 if (gimple_vdef (stmt))
3412 changed |= set_ssa_val_to (gimple_vdef (stmt),
3413 SSA_VAL (gimple_vuse (stmt)));
3414 goto done;
3416 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3418 changed = defs_to_varying (stmt);
3419 goto done;
3423 if (!gimple_call_internal_p (stmt)
3424 && (/* Calls to the same function with the same vuse
3425 and the same operands do not necessarily return the same
3426 value, unless they're pure or const. */
3427 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3428 /* If calls have a vdef, subsequent calls won't have
3429 the same incoming vuse. So, if 2 calls with vdef have the
3430 same vuse, we know they're not subsequent.
3431 	      We can value number two calls to the same function with the
3432 	      same vuse and the same operands that are not subsequent as
3433 	      the same, because there is no code in the program that can
3434 	      compare the two values...  */
3435 || (gimple_vdef (stmt)
3436 /* ... unless the call returns a pointer which does
3437 not alias with anything else. In which case the
3438 		 information that the values are distinct is encoded
3439 in the IL. */
3440 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3441 /* Only perform the following when being called from PRE
3442 which embeds tail merging. */
3443 && default_vn_walk_kind == VN_WALK)))
3444 changed = visit_reference_op_call (lhs, call_stmt);
3445 else
3446 changed = defs_to_varying (stmt);
3448 else
3449 changed = defs_to_varying (stmt);
3451 done:
3452 return changed;
3455 /* Compare two operands by reverse postorder index.  */
3457 static int
3458 compare_ops (const void *pa, const void *pb)
3460 const tree opa = *((const tree *)pa);
3461 const tree opb = *((const tree *)pb);
3462 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3463 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3464 basic_block bba;
3465 basic_block bbb;
3467 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3468 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3469 else if (gimple_nop_p (opstmta))
3470 return -1;
3471 else if (gimple_nop_p (opstmtb))
3472 return 1;
3474 bba = gimple_bb (opstmta);
3475 bbb = gimple_bb (opstmtb);
3477 if (!bba && !bbb)
3478 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3479 else if (!bba)
3480 return -1;
3481 else if (!bbb)
3482 return 1;
3484 if (bba == bbb)
3486 if (gimple_code (opstmta) == GIMPLE_PHI
3487 && gimple_code (opstmtb) == GIMPLE_PHI)
3488 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3489 else if (gimple_code (opstmta) == GIMPLE_PHI)
3490 return -1;
3491 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3492 return 1;
3493 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3494 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3495 else
3496 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3498 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3501 /* Sort an array containing members of a strongly connected component
3502 SCC so that the members are ordered by RPO number.
3503 This means that when the sort is complete, iterating through the
3504 array will give you the members in RPO order. */
3506 static void
3507 sort_scc (vec<tree> scc)
3509 scc.qsort (compare_ops);
3512 /* Insert the no longer used nary ONARY to the hash INFO. */
3514 static void
3515 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3517 size_t size = sizeof_vn_nary_op (onary->length);
3518 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3519 &info->nary_obstack);
3520 memcpy (nary, onary, size);
3521 vn_nary_op_insert_into (nary, info->nary, false);
3524 /* Insert the no longer used phi OPHI to the hash INFO. */
3526 static void
3527 copy_phi (vn_phi_t ophi, vn_tables_t info)
3529 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3530 vn_phi_s **slot;
3531 memcpy (phi, ophi, sizeof (*phi));
3532 ophi->phiargs.create (0);
3533 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3534 gcc_assert (!*slot);
3535 *slot = phi;
3538 /* Insert the no longer used reference OREF to the hash INFO. */
3540 static void
3541 copy_reference (vn_reference_t oref, vn_tables_t info)
3543 vn_reference_t ref;
3544 vn_reference_s **slot;
3545 ref = (vn_reference_t) pool_alloc (info->references_pool);
3546 memcpy (ref, oref, sizeof (*ref));
3547 oref->operands.create (0);
3548 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3549 if (*slot)
3550 free_reference (*slot);
3551 *slot = ref;
3554 /* Process a strongly connected component in the SSA graph. */
3556 static void
3557 process_scc (vec<tree> scc)
3559 tree var;
3560 unsigned int i;
3561 unsigned int iterations = 0;
3562 bool changed = true;
3563 vn_nary_op_iterator_type hin;
3564 vn_phi_iterator_type hip;
3565 vn_reference_iterator_type hir;
3566 vn_nary_op_t nary;
3567 vn_phi_t phi;
3568 vn_reference_t ref;
3570 /* If the SCC has a single member, just visit it. */
3571 if (scc.length () == 1)
3573 tree use = scc[0];
3574 if (VN_INFO (use)->use_processed)
3575 return;
3576 /* We need to make sure it doesn't form a cycle itself, which can
3577 happen for self-referential PHI nodes. In that case we would
3578 end up inserting an expression with VN_TOP operands into the
3579 valid table which makes us derive bogus equivalences later.
3580 The cheapest way to check this is to assume it for all PHI nodes. */
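      /* For instance (illustrative), i_1 = PHI <0, i_1> forms a
	 single-member SCC that still references itself; visiting it just
	 once could record an expression with a VN_TOP operand in the
	 valid table, hence PHIs fall through to the iteration below.  */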
3581 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3582 /* Fallthru to iteration. */ ;
3583 else
3585 visit_use (use);
3586 return;
3590 if (dump_file && (dump_flags & TDF_DETAILS))
3591 print_scc (dump_file, scc);
3593 /* Iterate over the SCC with the optimistic table until it stops
3594 changing. */
3595 current_info = optimistic_info;
3596 while (changed)
3598 changed = false;
3599 iterations++;
3600 if (dump_file && (dump_flags & TDF_DETAILS))
3601 fprintf (dump_file, "Starting iteration %d\n", iterations);
3602 /* As we are value-numbering optimistically we have to
3603 clear the expression tables and the simplified expressions
3604 in each iteration until we converge. */
3605 optimistic_info->nary->empty ();
3606 optimistic_info->phis->empty ();
3607 optimistic_info->references->empty ();
3608 obstack_free (&optimistic_info->nary_obstack, NULL);
3609 gcc_obstack_init (&optimistic_info->nary_obstack);
3610 empty_alloc_pool (optimistic_info->phis_pool);
3611 empty_alloc_pool (optimistic_info->references_pool);
3612 FOR_EACH_VEC_ELT (scc, i, var)
3613 VN_INFO (var)->expr = NULL_TREE;
3614 FOR_EACH_VEC_ELT (scc, i, var)
3615 changed |= visit_use (var);
3618 if (dump_file && (dump_flags & TDF_DETAILS))
3619 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3620 statistics_histogram_event (cfun, "SCC iterations", iterations);
3622 /* Finally, copy the contents of the no longer used optimistic
3623 table to the valid table. */
3624 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3625 copy_nary (nary, valid_info);
3626 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3627 copy_phi (phi, valid_info);
3628 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3629 ref, vn_reference_t, hir)
3630 copy_reference (ref, valid_info);
3632 current_info = valid_info;
3636 /* Pop the components of the found SCC for NAME off the SCC stack
3637 and process them. Returns true if all went well, false if
3638 we run into resource limits. */
3640 static bool
3641 extract_and_process_scc_for_name (tree name)
3643 auto_vec<tree> scc;
3644 tree x;
3646 /* Found an SCC, pop the components off the SCC stack and
3647 process them. */
3650 x = sccstack.pop ();
3652 VN_INFO (x)->on_sccstack = false;
3653 scc.safe_push (x);
3654 } while (x != name);
3656   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
3657 if (scc.length ()
3658 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3660 if (dump_file)
3661 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3662 "SCC size %u exceeding %u\n", scc.length (),
3663 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3665 return false;
3668 if (scc.length () > 1)
3669 sort_scc (scc);
3671 process_scc (scc);
3673 return true;
3676 /* Depth first search on NAME to discover and process SCC's in the SSA
3677 graph.
3678 Execution of this algorithm relies on the fact that the SCC's are
3679 popped off the stack in topological order.
3680 Returns true if successful, false if we stopped processing SCC's due
3681 to resource constraints. */
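/* A hedged illustration of the iterative scheme used below (the fragment
   is lifted from the body, not new behaviour): instead of recursing in C
   on every SSA use, the walker saves its state on explicit stacks,
   roughly

     itervec.safe_push (iter);
     namevec.safe_push (name);
     name = use;
     goto start_over;

   and pops that state again once op_iter_done, preserving the
   Tarjan-style dfsnum/low bookkeeping without deep native recursion.  */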
3683 static bool
3684 DFS (tree name)
3686 vec<ssa_op_iter> itervec = vNULL;
3687 vec<tree> namevec = vNULL;
3688 use_operand_p usep = NULL;
3689 gimple defstmt;
3690 tree use;
3691 ssa_op_iter iter;
3693 start_over:
3694 /* SCC info */
3695 VN_INFO (name)->dfsnum = next_dfs_num++;
3696 VN_INFO (name)->visited = true;
3697 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3699 sccstack.safe_push (name);
3700 VN_INFO (name)->on_sccstack = true;
3701 defstmt = SSA_NAME_DEF_STMT (name);
3703 /* Recursively DFS on our operands, looking for SCC's. */
3704 if (!gimple_nop_p (defstmt))
3706 /* Push a new iterator. */
3707 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3708 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
3709 else
3710 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3712 else
3713 clear_and_done_ssa_iter (&iter);
3715 while (1)
3717 /* If we are done processing uses of a name, go up the stack
3718 of iterators and process SCCs as we found them. */
3719 if (op_iter_done (&iter))
3721 /* See if we found an SCC. */
3722 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3723 if (!extract_and_process_scc_for_name (name))
3725 namevec.release ();
3726 itervec.release ();
3727 return false;
3730 /* Check if we are done. */
3731 if (namevec.is_empty ())
3733 namevec.release ();
3734 itervec.release ();
3735 return true;
3738 /* Restore the last use walker and continue walking there. */
3739 use = name;
3740 name = namevec.pop ();
3741 memcpy (&iter, &itervec.last (),
3742 sizeof (ssa_op_iter));
3743 itervec.pop ();
3744 goto continue_walking;
3747 use = USE_FROM_PTR (usep);
3749 /* Since we handle phi nodes, we will sometimes get
3750 invariants in the use expression. */
3751 if (TREE_CODE (use) == SSA_NAME)
3753 if (! (VN_INFO (use)->visited))
3755 /* Recurse by pushing the current use walking state on
3756 the stack and starting over. */
3757 itervec.safe_push (iter);
3758 namevec.safe_push (name);
3759 name = use;
3760 goto start_over;
3762 continue_walking:
3763 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3764 VN_INFO (use)->low);
3766 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3767 && VN_INFO (use)->on_sccstack)
3769 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3770 VN_INFO (name)->low);
3774 usep = op_iter_next_use (&iter);
3778 /* Allocate a value number table. */
3780 static void
3781 allocate_vn_table (vn_tables_t table)
3783 table->phis = new vn_phi_table_type (23);
3784 table->nary = new vn_nary_op_table_type (23);
3785 table->references = new vn_reference_table_type (23);
3787 gcc_obstack_init (&table->nary_obstack);
3788 table->phis_pool = create_alloc_pool ("VN phis",
3789 sizeof (struct vn_phi_s),
3790 30);
3791 table->references_pool = create_alloc_pool ("VN references",
3792 sizeof (struct vn_reference_s),
3793 30);
3796 /* Free a value number table. */
3798 static void
3799 free_vn_table (vn_tables_t table)
3801 delete table->phis;
3802 table->phis = NULL;
3803 delete table->nary;
3804 table->nary = NULL;
3805 delete table->references;
3806 table->references = NULL;
3807 obstack_free (&table->nary_obstack, NULL);
3808 free_alloc_pool (table->phis_pool);
3809 free_alloc_pool (table->references_pool);
3812 static void
3813 init_scc_vn (void)
3815 size_t i;
3816 int j;
3817 int *rpo_numbers_temp;
3819 calculate_dominance_info (CDI_DOMINATORS);
3820 sccstack.create (0);
3821 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
3823 constant_value_ids = BITMAP_ALLOC (NULL);
3825 next_dfs_num = 1;
3826 next_value_id = 1;
3828 vn_ssa_aux_table.create (num_ssa_names + 1);
3829 /* VEC_alloc doesn't actually grow it to the right size, it just
3830 preallocates the space to do so. */
3831 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3832 gcc_obstack_init (&vn_ssa_aux_obstack);
3834 shared_lookup_phiargs.create (0);
3835 shared_lookup_references.create (0);
3836 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
3837 rpo_numbers_temp =
3838 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
3839 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3841   /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
3842      means that the i'th block in RPO order is bb.  We want to map bbs to
3843      RPO numbers, so we need to invert this array.  */
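  /* E.g. (illustrative numbers): if rpo_numbers_temp is {2, 4, 3}, block 2
     is first in RPO, block 4 second and block 3 third, and the loop below
     sets rpo_numbers[2] = 0, rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */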
3844 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
3845 rpo_numbers[rpo_numbers_temp[j]] = j;
3847 XDELETE (rpo_numbers_temp);
3849 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3851 /* Create the VN_INFO structures, and initialize value numbers to
3852 TOP. */
3853 for (i = 0; i < num_ssa_names; i++)
3855 tree name = ssa_name (i);
3856 if (name)
3858 VN_INFO_GET (name)->valnum = VN_TOP;
3859 VN_INFO (name)->expr = NULL_TREE;
3860 VN_INFO (name)->value_id = 0;
3864 renumber_gimple_stmt_uids ();
3866 /* Create the valid and optimistic value numbering tables. */
3867 valid_info = XCNEW (struct vn_tables_s);
3868 allocate_vn_table (valid_info);
3869 optimistic_info = XCNEW (struct vn_tables_s);
3870 allocate_vn_table (optimistic_info);
3873 void
3874 free_scc_vn (void)
3876 size_t i;
3878 delete constant_to_value_id;
3879 constant_to_value_id = NULL;
3880 BITMAP_FREE (constant_value_ids);
3881 shared_lookup_phiargs.release ();
3882 shared_lookup_references.release ();
3883 XDELETEVEC (rpo_numbers);
3885 for (i = 0; i < num_ssa_names; i++)
3887 tree name = ssa_name (i);
3888 if (name
3889 && VN_INFO (name)->needs_insertion)
3890 release_ssa_name (name);
3892 obstack_free (&vn_ssa_aux_obstack, NULL);
3893 vn_ssa_aux_table.release ();
3895 sccstack.release ();
3896 free_vn_table (valid_info);
3897 XDELETE (valid_info);
3898 free_vn_table (optimistic_info);
3899 XDELETE (optimistic_info);
3902 /* Set *ID according to RESULT. */
3904 static void
3905 set_value_id_for_result (tree result, unsigned int *id)
3907 if (result && TREE_CODE (result) == SSA_NAME)
3908 *id = VN_INFO (result)->value_id;
3909 else if (result && is_gimple_min_invariant (result))
3910 *id = get_or_alloc_constant_value_id (result);
3911 else
3912 *id = get_next_value_id ();
3915 /* Set the value ids in the valid hash tables. */
3917 static void
3918 set_hashtable_value_ids (void)
3920 vn_nary_op_iterator_type hin;
3921 vn_phi_iterator_type hip;
3922 vn_reference_iterator_type hir;
3923 vn_nary_op_t vno;
3924 vn_reference_t vr;
3925 vn_phi_t vp;
3927 /* Now set the value ids of the things we had put in the hash
3928 table. */
3930 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
3931 set_value_id_for_result (vno->result, &vno->value_id);
3933 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
3934 set_value_id_for_result (vp->result, &vp->value_id);
3936 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
3937 hir)
3938 set_value_id_for_result (vr->result, &vr->value_id);
3941 class cond_dom_walker : public dom_walker
3943 public:
3944 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
3946 virtual void before_dom_children (basic_block);
3948 bool fail;
3951 void
3952 cond_dom_walker::before_dom_children (basic_block bb)
3954 edge e;
3955 edge_iterator ei;
3957 if (fail)
3958 return;
3960 /* If any of the predecessor edges that do not come from blocks dominated
3961 by us are still marked as possibly executable, consider this block
3962 reachable. */
3963 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
3964 FOR_EACH_EDGE (e, ei, bb->preds)
3965 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
3966 reachable |= (e->flags & EDGE_EXECUTABLE);
3968 /* If the block is not reachable, none of its outgoing edges are
3969 executable. */
3970 if (!reachable)
3972 if (dump_file && (dump_flags & TDF_DETAILS))
3973 fprintf (dump_file, "Marking all outgoing edges of unreachable "
3974 "BB %d as not executable\n", bb->index);
3976 FOR_EACH_EDGE (e, ei, bb->succs)
3977 e->flags &= ~EDGE_EXECUTABLE;
3978 return;
3981 gimple stmt = last_stmt (bb);
3982 if (!stmt)
3983 return;
3985 enum gimple_code code = gimple_code (stmt);
3986 if (code != GIMPLE_COND
3987 && code != GIMPLE_SWITCH
3988 && code != GIMPLE_GOTO)
3989 return;
3991 if (dump_file && (dump_flags & TDF_DETAILS))
3993 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
3994 bb->index);
3995 print_gimple_stmt (dump_file, stmt, 0, 0);
3998 /* Value-number the last stmt's SSA uses. */
3999 ssa_op_iter i;
4000 tree op;
4001 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4002 if (VN_INFO (op)->visited == false
4003 && !DFS (op))
4005 fail = true;
4006 return;
4009 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4010 if value-numbering can prove they are not reachable. Handling
4011 computed gotos is also possible. */
4012 tree val;
4013 switch (code)
4015 case GIMPLE_COND:
4017 tree lhs = gimple_cond_lhs (stmt);
4018 tree rhs = gimple_cond_rhs (stmt);
4019 /* Work hard at computing the condition, taking into account
4020 the valueization of the defining stmt. */
4021 if (TREE_CODE (lhs) == SSA_NAME)
4022 lhs = vn_get_expr_for (lhs);
4023 if (TREE_CODE (rhs) == SSA_NAME)
4024 rhs = vn_get_expr_for (rhs);
4025 val = fold_binary (gimple_cond_code (stmt),
4026 boolean_type_node, lhs, rhs);
4027 break;
4029 case GIMPLE_SWITCH:
4030 val = gimple_switch_index (as_a <gswitch *> (stmt));
4031 break;
4032 case GIMPLE_GOTO:
4033 val = gimple_goto_dest (stmt);
4034 break;
4035 default:
4036 gcc_unreachable ();
4038 if (!val)
4039 return;
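/* If the valueized controlling value selects a unique successor edge,
   every other outgoing edge is known not to be executable. */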
4041 edge taken = find_taken_edge (bb, vn_valueize (val));
4042 if (!taken)
4043 return;
4045 if (dump_file && (dump_flags & TDF_DETAILS))
4046 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4047 "not executable\n", bb->index, bb->index, taken->dest->index);
4049 FOR_EACH_EDGE (e, ei, bb->succs)
4050 if (e != taken)
4051 e->flags &= ~EDGE_EXECUTABLE;
4054 /* Do SCCVN. Returns true if it finished, false if we bailed out
4055 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4056 how alias-oracle walks are used during the VN process. */
4058 bool
4059 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4061 basic_block bb;
4062 size_t i;
4063 tree param;
4065 default_vn_walk_kind = default_vn_walk_kind_;
4067 init_scc_vn ();
4068 current_info = valid_info;
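/* Default definitions of parameters have no defining statement, so
   value-number them to themselves up front. */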
4070 for (param = DECL_ARGUMENTS (current_function_decl);
4071 param;
4072 param = DECL_CHAIN (param))
4074 tree def = ssa_default_def (cfun, param);
4075 if (def)
4077 VN_INFO (def)->visited = true;
4078 VN_INFO (def)->valnum = def;
4082 /* Mark all edges as possibly executable. */
4083 FOR_ALL_BB_FN (bb, cfun)
4085 edge_iterator ei;
4086 edge e;
4087 FOR_EACH_EDGE (e, ei, bb->succs)
4088 e->flags |= EDGE_EXECUTABLE;
4091 /* Walk all blocks in dominator order, value-numbering each last stmt's
4092 SSA uses and deciding which outgoing edges are not executable. */
4093 cond_dom_walker walker;
4094 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4095 if (walker.fail)
4097 free_scc_vn ();
4098 return false;
4101 /* Value-number remaining SSA names. */
4102 for (i = 1; i < num_ssa_names; ++i)
4104 tree name = ssa_name (i);
4105 if (name
4106 && VN_INFO (name)->visited == false
4107 && !has_zero_uses (name))
4108 if (!DFS (name))
4110 free_scc_vn ();
4111 return false;
4117 /* Initialize the value ids: names that are their own value (or still
VN_TOP) get a fresh id, invariant values get a constant value id. */
4117 for (i = 1; i < num_ssa_names; ++i)
4119 tree name = ssa_name (i);
4120 vn_ssa_aux_t info;
4121 if (!name)
4122 continue;
4123 info = VN_INFO (name);
4124 if (info->valnum == name
4125 || info->valnum == VN_TOP)
4126 info->value_id = get_next_value_id ();
4127 else if (is_gimple_min_invariant (info->valnum))
4128 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4131 /* Propagate value ids: a name whose value is another SSA name inherits
that name's value id. */
4132 for (i = 1; i < num_ssa_names; ++i)
4134 tree name = ssa_name (i);
4135 vn_ssa_aux_t info;
4136 if (!name)
4137 continue;
4138 info = VN_INFO (name);
4139 if (TREE_CODE (info->valnum) == SSA_NAME
4140 && info->valnum != name
4141 && info->value_id != VN_INFO (info->valnum)->value_id)
4142 info->value_id = VN_INFO (info->valnum)->value_id;
4145 set_hashtable_value_ids ();
4147 if (dump_file && (dump_flags & TDF_DETAILS))
4149 fprintf (dump_file, "Value numbers:\n");
4150 for (i = 0; i < num_ssa_names; i++)
4152 tree name = ssa_name (i);
4153 if (name
4154 && VN_INFO (name)->visited
4155 && SSA_VAL (name) != name)
4157 print_generic_expr (dump_file, name, 0);
4158 fprintf (dump_file, " = ");
4159 print_generic_expr (dump_file, SSA_VAL (name), 0);
4160 fprintf (dump_file, "\n");
4165 return true;
4168 /* Return the maximum value id we have ever seen. */
4170 unsigned int
4171 get_max_value_id (void)
4173 return next_value_id;
4176 /* Return the next unique value id. */
4178 unsigned int
4179 get_next_value_id (void)
4181 return next_value_id++;
4185 /* Compare two expressions E1 and E2 and return true if they are equal. */
4187 bool
4188 expressions_equal_p (tree e1, tree e2)
4190 /* The obvious case. */
4191 if (e1 == e2)
4192 return true;
4194 /* If only one of them is null, they cannot be equal. */
4195 if (!e1 || !e2)
4196 return false;
4198 /* Now perform the actual comparison. */
4199 if (TREE_CODE (e1) == TREE_CODE (e2)
4200 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4201 return true;
4203 return false;
4207 /* Return true if the nary operation NARY may trap. This is a copy
4208 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4210 bool
4211 vn_nary_may_trap (vn_nary_op_t nary)
4213 tree type;
4214 tree rhs2 = NULL_TREE;
4215 bool honor_nans = false;
4216 bool honor_snans = false;
4217 bool fp_operation = false;
4218 bool honor_trapv = false;
4219 bool handled, ret;
4220 unsigned i;
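/* For comparisons and unary or binary operations derive the FP and
   trapping-overflow properties from the operation's type. */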
4222 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4223 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4224 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4226 type = nary->type;
4227 fp_operation = FLOAT_TYPE_P (type);
4228 if (fp_operation)
4230 honor_nans = flag_trapping_math && !flag_finite_math_only;
4231 honor_snans = flag_signaling_nans != 0;
4233 else if (INTEGRAL_TYPE_P (type)
4234 && TYPE_OVERFLOW_TRAPS (type))
4235 honor_trapv = true;
4237 if (nary->length >= 2)
4238 rhs2 = nary->op[1];
4239 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4240 honor_trapv,
4241 honor_nans, honor_snans, rhs2,
4242 &handled);
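/* If the helper recognized the operation and determined it may trap,
   we are done. */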
4243 if (handled
4244 && ret)
4245 return true;
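/* Also check whether any of the operands could trap on its own. */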
4247 for (i = 0; i < nary->length; ++i)
4248 if (tree_could_trap_p (nary->op[i]))
4249 return true;
4251 return false;