/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "input.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "stor-layout.h"
31 #include "predict.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "dominance.h"
35 #include "cfg.h"
36 #include "cfganal.h"
37 #include "basic-block.h"
38 #include "gimple-pretty-print.h"
39 #include "tree-inline.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "rtl.h"
54 #include "flags.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "varasm.h"
62 #include "stmt.h"
63 #include "expr.h"
64 #include "tree-dfa.h"
65 #include "tree-ssa.h"
66 #include "dumpfile.h"
67 #include "alloc-pool.h"
68 #include "cfgloop.h"
69 #include "params.h"
70 #include "tree-ssa-propagate.h"
71 #include "tree-ssa-sccvn.h"
72 #include "tree-cfg.h"
73 #include "domwalk.h"
74 #include "ipa-ref.h"
75 #include "plugin-api.h"
76 #include "cgraph.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

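/* Illustrative sketch (not part of the original sources): the essence
   of alternative 2 above, on a hypothetical toy SSA graph.  A
   Tarjan-style DFS walks use-def edges; when an SCC root is found the
   whole SCC is popped and re-value-numbered until no member changes,
   which models the optimistic iteration.  Every type and helper here
   (toy_name, toy_value_number_once, the fixed stack bounds) is a
   made-up stand-in, not the real machinery in this file, and the
   block is disabled with #if 0.  */
#if 0
struct toy_name
{
  unsigned dfsnum;		/* DFS preorder number; 0 if unvisited.  */
  unsigned low;			/* Lowest dfsnum reachable from here.  */
  bool on_sccstack;
  struct toy_name **ops;	/* SSA operands of the defining stmt.  */
  unsigned n_ops;
};

/* Hash-and-lookup one name against the optimistic table; returns true
   if its value number changed.  Stand-in for the real lookup code.  */
static bool toy_value_number_once (struct toy_name *);

static struct toy_name *toy_stack[1024];
static unsigned toy_stack_top;
static unsigned toy_next_dfsnum = 1;

/* Iterate an SCC to a fixpoint.  Most SCCs are small, so this
   converges quickly in practice.  */

static void
toy_process_scc (struct toy_name **scc, unsigned n)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (unsigned i = 0; i < n; ++i)
	changed |= toy_value_number_once (scc[i]);
    }
}

/* DFS over SSA use-def edges.  Popping whole SCCs guarantees that all
   operands defined outside the SCC already have value numbers.  */

static void
toy_dfs (struct toy_name *x)
{
  x->dfsnum = x->low = toy_next_dfsnum++;
  toy_stack[toy_stack_top++] = x;
  x->on_sccstack = true;
  for (unsigned i = 0; i < x->n_ops; ++i)
    {
      struct toy_name *op = x->ops[i];
      if (op->dfsnum == 0)
	{
	  toy_dfs (op);
	  if (op->low < x->low)
	    x->low = op->low;
	}
      else if (op->on_sccstack && op->dfsnum < x->low)
	x->low = op->dfsnum;
    }
  if (x->low == x->dfsnum)
    {
      /* x is the root of an SCC; pop and iterate it.  */
      struct toy_name *scc[1024];
      unsigned n = 0;
      struct toy_name *y;
      do
	{
	  y = toy_stack[--toy_stack_top];
	  y->on_sccstack = false;
	  scc[n++] = y;
	}
      while (y != x);
      toy_process_scc (scc, n);
    }
}
#endif
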
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s *value_type;
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s *value_type;
  typedef vn_phi_s *compare_type;
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}

/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s *value_type;
  typedef vn_reference_s *compare_type;
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  pool_allocator<vn_phi_s> *phis_pool;
  pool_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;

/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s *value_type;
  typedef vn_constant_s *compare_type;
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

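/* Illustrative usage sketch (hypothetical, not in the original file):
   equal constants of compatible type share one value id, so a second
   lookup returns the id allocated by the first call.  Disabled with
   #if 0; it only demonstrates the API directly above and below.  */
#if 0
static void
toy_constant_value_id_demo (void)
{
  tree c = build_int_cst (integer_type_node, 42);
  unsigned int id1 = get_or_alloc_constant_value_id (c);
  unsigned int id2 = get_constant_value_id (c);
  gcc_assert (id1 == id2);
  gcc_assert (value_id_constant_p (id1));
}
#endif
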
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

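/* Example (illustrative, not from the original sources): because
   copy_reference_ops_from_ref below canonicalizes a plain decl access
   "a" to MEM[&a], a reference built from "a" and one built from
   "MEM[&a]" yield the same operand list; the tem1/tem2 unwrapping
   above is what lets the ADDR_EXPR under a MEM_REF compare equal to
   the bare decl operand.  */
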
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	/* These are only interesting for their operands, their
	   existence, and their type.  They will never be the last
	   ref in the chain of references (IE they require an
	   operand), so we don't have to put anything
	   for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && off == 0
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}

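/* Example (illustrative, assuming 32-bit int): for
   "static const int tab[2] = { 1, 2 };" a lookup of tab[1] reaches the
   constant-initializer branch above with an accumulated byte offset of
   4; ctor_for_folding supplies the initializer and fold_ctor_reference
   recovers the constant 2.  */
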
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

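/* Note (illustrative): vn_reference_compute_hash adds
   SSA_NAME_VERSION (vr1->vuse) into the final hash, which is what lets
   the fixup above retarget a cached hashcode to a new vuse by
   subtracting the old version and adding the new one instead of
   rehashing all operands.  */
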
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

1639 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1640 from the statement defining VUSE and if not successful tries to
1641 translate *REFP and VR_ through an aggregate copy at the definition
1642 of VUSE. */
1644 static void *
1645 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1646 bool disambiguate_only)
1648 vn_reference_t vr = (vn_reference_t)vr_;
1649 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1650 tree base;
1651 HOST_WIDE_INT offset, maxsize;
1652 static vec<vn_reference_op_s>
1653 lhs_ops = vNULL;
1654 ao_ref lhs_ref;
1655 bool lhs_ref_ok = false;
1657 /* First try to disambiguate after value-replacing in the definitions LHS. */
1658 if (is_gimple_assign (def_stmt))
1660 tree lhs = gimple_assign_lhs (def_stmt);
1661 bool valueized_anything = false;
1662 /* Avoid re-allocation overhead. */
1663 lhs_ops.truncate (0);
1664 copy_reference_ops_from_ref (lhs, &lhs_ops);
1665 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1666 if (valueized_anything)
1668 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1669 get_alias_set (lhs),
1670 TREE_TYPE (lhs), lhs_ops);
1671 if (lhs_ref_ok
1672 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1673 return NULL;
1675 else
1677 ao_ref_init (&lhs_ref, lhs);
1678 lhs_ref_ok = true;
1681 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1682 && gimple_call_num_args (def_stmt) <= 4)
1684 /* For builtin calls valueize its arguments and call the
1685 alias oracle again. Valueization may improve points-to
1686 info of pointers and constify size and position arguments.
1687 Originally this was motivated by PR61034 which has
1688 conditional calls to free falsely clobbering ref because
1689 of imprecise points-to info of the argument. */
1690 tree oldargs[4];
1691 bool valueized_anything = false;
1692 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1694 oldargs[i] = gimple_call_arg (def_stmt, i);
1695 if (TREE_CODE (oldargs[i]) == SSA_NAME
1696 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1698 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1699 valueized_anything = true;
1702 if (valueized_anything)
1704 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1705 ref);
1706 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1707 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1708 if (!res)
1709 return NULL;
1713 if (disambiguate_only)
1714 return (void *)-1;
1716 base = ao_ref_base (ref);
1717 offset = ref->offset;
1718 maxsize = ref->max_size;
1720 /* If we cannot constrain the size of the reference we cannot
1721 test if anything kills it. */
1722 if (maxsize == -1)
1723 return (void *)-1;
1725 /* We can't deduce anything useful from clobbers. */
1726 if (gimple_clobber_p (def_stmt))
1727 return (void *)-1;
1729 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1730 from that definition.
1731 1) Memset. */
1732 if (is_gimple_reg_type (vr->type)
1733 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1734 && integer_zerop (gimple_call_arg (def_stmt, 1))
1735 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1736 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1738 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1739 tree base2;
1740 HOST_WIDE_INT offset2, size2, maxsize2;
1741 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1742 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1743 if ((unsigned HOST_WIDE_INT)size2 / 8
1744 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1745 && maxsize2 != -1
1746 && operand_equal_p (base, base2, 0)
1747 && offset2 <= offset
1748 && offset2 + size2 >= offset + maxsize)
1750 tree val = build_zero_cst (vr->type);
1751 return vn_reference_lookup_or_insert_for_pieces
1752 (vuse, vr->set, vr->type, vr->operands, val);
1756 /* 2) Assignment from an empty CONSTRUCTOR. */
1757 else if (is_gimple_reg_type (vr->type)
1758 && gimple_assign_single_p (def_stmt)
1759 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1760 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1762 tree base2;
1763 HOST_WIDE_INT offset2, size2, maxsize2;
1764 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1765 &offset2, &size2, &maxsize2);
1766 if (maxsize2 != -1
1767 && operand_equal_p (base, base2, 0)
1768 && offset2 <= offset
1769 && offset2 + size2 >= offset + maxsize)
1771 tree val = build_zero_cst (vr->type);
1772 return vn_reference_lookup_or_insert_for_pieces
1773 (vuse, vr->set, vr->type, vr->operands, val);
1777 /* 3) Assignment from a constant. We can use folds native encode/interpret
1778 routines to extract the assigned bits. */
1779 else if (vn_walk_kind == VN_WALKREWRITE
1780 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1781 && ref->size == maxsize
1782 && maxsize % BITS_PER_UNIT == 0
1783 && offset % BITS_PER_UNIT == 0
1784 && is_gimple_reg_type (vr->type)
1785 && gimple_assign_single_p (def_stmt)
1786 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1788 tree base2;
1789 HOST_WIDE_INT offset2, size2, maxsize2;
1790 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1791 &offset2, &size2, &maxsize2);
1792 if (maxsize2 != -1
1793 && maxsize2 == size2
1794 && size2 % BITS_PER_UNIT == 0
1795 && offset2 % BITS_PER_UNIT == 0
1796 && operand_equal_p (base, base2, 0)
1797 && offset2 <= offset
1798 && offset2 + size2 >= offset + maxsize)
1800 /* We support up to 512-bit values (for V8DFmode). */
1801 unsigned char buffer[64];
1802 int len;
1804 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1805 buffer, sizeof (buffer));
1806 if (len > 0)
1808 tree val = native_interpret_expr (vr->type,
1809 buffer
1810 + ((offset - offset2)
1811 / BITS_PER_UNIT),
1812 ref->size / BITS_PER_UNIT);
1813 if (val)
1814 return vn_reference_lookup_or_insert_for_pieces
1815 (vuse, vr->set, vr->type, vr->operands, val);
1820 /* 4) Assignment from an SSA name which definition we may be able
1821 to access pieces from. */
1822 else if (ref->size == maxsize
1823 && is_gimple_reg_type (vr->type)
1824 && gimple_assign_single_p (def_stmt)
1825 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1827 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1828 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1829 if (is_gimple_assign (def_stmt2)
1830 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1831 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1832 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1834 tree base2;
1835 HOST_WIDE_INT offset2, size2, maxsize2, off;
1836 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1837 &offset2, &size2, &maxsize2);
1838 off = offset - offset2;
1839 if (maxsize2 != -1
1840 && maxsize2 == size2
1841 && operand_equal_p (base, base2, 0)
1842 && offset2 <= offset
1843 && offset2 + size2 >= offset + maxsize)
1845 tree val = NULL_TREE;
1846 HOST_WIDE_INT elsz
1847 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1848 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1850 if (off == 0)
1851 val = gimple_assign_rhs1 (def_stmt2);
1852 else if (off == elsz)
1853 val = gimple_assign_rhs2 (def_stmt2);
1855 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1856 && off % elsz == 0)
1858 tree ctor = gimple_assign_rhs1 (def_stmt2);
1859 unsigned i = off / elsz;
1860 if (i < CONSTRUCTOR_NELTS (ctor))
1862 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1863 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1865 if (TREE_CODE (TREE_TYPE (elt->value))
1866 != VECTOR_TYPE)
1867 val = elt->value;
1871 if (val)
1872 return vn_reference_lookup_or_insert_for_pieces
1873 (vuse, vr->set, vr->type, vr->operands, val);
1878 /* 5) For aggregate copies translate the reference through them if
1879 the copy kills ref. */
1880 else if (vn_walk_kind == VN_WALKREWRITE
1881 && gimple_assign_single_p (def_stmt)
1882 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1883 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1884 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1886 tree base2;
1887 HOST_WIDE_INT offset2, size2, maxsize2;
1888 int i, j;
1889 auto_vec<vn_reference_op_s> rhs;
1890 vn_reference_op_t vro;
1891 ao_ref r;
1893 if (!lhs_ref_ok)
1894 return (void *)-1;
1896 /* See if the assignment kills REF. */
1897 base2 = ao_ref_base (&lhs_ref);
1898 offset2 = lhs_ref.offset;
1899 size2 = lhs_ref.size;
1900 maxsize2 = lhs_ref.max_size;
1901 if (maxsize2 == -1
1902 || (base != base2
1903 && (TREE_CODE (base) != MEM_REF
1904 || TREE_CODE (base2) != MEM_REF
1905 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1906 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1907 TREE_OPERAND (base2, 1))))
1908 || offset2 > offset
1909 || offset2 + size2 < offset + maxsize)
1910 return (void *)-1;
1912 /* Find the common base of ref and the lhs. lhs_ops already
1913 contains valueized operands for the lhs. */
1914 i = vr->operands.length () - 1;
1915 j = lhs_ops.length () - 1;
1916 while (j >= 0 && i >= 0
1917 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1919 i--;
1920 j--;
1923 /* ??? The innermost op should always be a MEM_REF and we already
1924 checked that the assignment to the lhs kills vr. Thus for
1925 aggregate copies using char[] types the vn_reference_op_eq
1926 may fail when comparing types for compatibility. But we really
1927 don't care here - further lookups with the rewritten operands
1928 will simply fail if we messed up types too badly. */
1929 HOST_WIDE_INT extra_off = 0;
1930 if (j == 0 && i >= 0
1931 && lhs_ops[0].opcode == MEM_REF
1932 && lhs_ops[0].off != -1)
1934 if (lhs_ops[0].off == vr->operands[i].off)
1935 i--, j--;
1936 else if (vr->operands[i].opcode == MEM_REF
1937 && vr->operands[i].off != -1)
1939 extra_off = vr->operands[i].off - lhs_ops[0].off;
1940 i--, j--;
1944 /* i now points to the first additional op.
1945 ??? LHS may not be completely contained in VR, one or more
1946 VIEW_CONVERT_EXPRs could be in its way. We could at least
1947 try handling outermost VIEW_CONVERT_EXPRs. */
1948 if (j != -1)
1949 return (void *)-1;
1951 /* Now re-write REF to be based on the rhs of the assignment. */
1952 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1954 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1955 if (extra_off != 0)
1957 if (rhs.length () < 2
1958 || rhs[0].opcode != MEM_REF
1959 || rhs[0].off == -1)
1960 return (void *)-1;
1961 rhs[0].off += extra_off;
1962 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1963 build_int_cst (TREE_TYPE (rhs[0].op0),
1964 extra_off));
1967 /* We need to pre-pend vr->operands[0..i] to rhs. */
1968 vec<vn_reference_op_s> old = vr->operands;
1969 if (i + 1 + rhs.length () > vr->operands.length ())
1971 vr->operands.safe_grow (i + 1 + rhs.length ());
1972 if (old == shared_lookup_references)
1973 shared_lookup_references = vr->operands;
1975 else
1976 vr->operands.truncate (i + 1 + rhs.length ());
1977 FOR_EACH_VEC_ELT (rhs, j, vro)
1978 vr->operands[i + 1 + j] = *vro;
1979 vr->operands = valueize_refs (vr->operands);
1980 if (old == shared_lookup_references)
1981 shared_lookup_references = vr->operands;
1982 vr->hashcode = vn_reference_compute_hash (vr);
1984 /* Try folding the new reference to a constant. */
1985 tree val = fully_constant_vn_reference_p (vr);
1986 if (val)
1987 return vn_reference_lookup_or_insert_for_pieces
1988 (vuse, vr->set, vr->type, vr->operands, val);
1990 /* Adjust *ref from the new operands. */
1991 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1992 return (void *)-1;
1993 /* This can happen with bitfields. */
1994 if (ref->size != r.size)
1995 return (void *)-1;
1996 *ref = r;
1998 /* Do not update last seen VUSE after translating. */
1999 last_vuse_ptr = NULL;
2001 /* Keep looking for the adjusted *REF / VR pair. */
2002 return NULL;
2005 /* 6) For memcpy copies translate the reference through them if
2006 the copy kills ref. */
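/* Schematically, given
     memcpy (&a, &b, 16);
     x_1 = MEM[(int *)&a + 8B];
   the load is rewritten below into a MEM_REF based on &b at the
   corresponding offset, provided the copy covers the whole
   access.  */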
2007 else if (vn_walk_kind == VN_WALKREWRITE
2008 && is_gimple_reg_type (vr->type)
2009 /* ??? Handle BCOPY as well. */
2010 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2011 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2012 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2013 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2014 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2015 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2016 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2017 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2019 tree lhs, rhs;
2020 ao_ref r;
2021 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2022 vn_reference_op_s op;
2023 HOST_WIDE_INT at;
2026 /* Only handle non-variable, addressable refs. */
2027 if (ref->size != maxsize
2028 || offset % BITS_PER_UNIT != 0
2029 || ref->size % BITS_PER_UNIT != 0)
2030 return (void *)-1;
2032 /* Extract a pointer base and an offset for the destination. */
2033 lhs = gimple_call_arg (def_stmt, 0);
2034 lhs_offset = 0;
2035 if (TREE_CODE (lhs) == SSA_NAME)
2037 lhs = SSA_VAL (lhs);
2038 if (TREE_CODE (lhs) == SSA_NAME)
2040 gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2041 if (gimple_assign_single_p (def_stmt)
2042 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2043 lhs = gimple_assign_rhs1 (def_stmt);
2046 if (TREE_CODE (lhs) == ADDR_EXPR)
2048 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2049 &lhs_offset);
2050 if (!tem)
2051 return (void *)-1;
2052 if (TREE_CODE (tem) == MEM_REF
2053 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2055 lhs = TREE_OPERAND (tem, 0);
2056 if (TREE_CODE (lhs) == SSA_NAME)
2057 lhs = SSA_VAL (lhs);
2058 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2060 else if (DECL_P (tem))
2061 lhs = build_fold_addr_expr (tem);
2062 else
2063 return (void *)-1;
2065 if (TREE_CODE (lhs) != SSA_NAME
2066 && TREE_CODE (lhs) != ADDR_EXPR)
2067 return (void *)-1;
2069 /* Extract a pointer base and an offset for the source. */
2070 rhs = gimple_call_arg (def_stmt, 1);
2071 rhs_offset = 0;
2072 if (TREE_CODE (rhs) == SSA_NAME)
2073 rhs = SSA_VAL (rhs);
2074 if (TREE_CODE (rhs) == ADDR_EXPR)
2076 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2077 &rhs_offset);
2078 if (!tem)
2079 return (void *)-1;
2080 if (TREE_CODE (tem) == MEM_REF
2081 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2083 rhs = TREE_OPERAND (tem, 0);
2084 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2086 else if (DECL_P (tem))
2087 rhs = build_fold_addr_expr (tem);
2088 else
2089 return (void *)-1;
2091 if (TREE_CODE (rhs) != SSA_NAME
2092 && TREE_CODE (rhs) != ADDR_EXPR)
2093 return (void *)-1;
2095 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2097 /* The bases of the destination and the reference have to agree. */
2098 if ((TREE_CODE (base) != MEM_REF
2099 && !DECL_P (base))
2100 || (TREE_CODE (base) == MEM_REF
2101 && (TREE_OPERAND (base, 0) != lhs
2102 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2103 || (DECL_P (base)
2104 && (TREE_CODE (lhs) != ADDR_EXPR
2105 || TREE_OPERAND (lhs, 0) != base)))
2106 return (void *)-1;
2108 at = offset / BITS_PER_UNIT;
2109 if (TREE_CODE (base) == MEM_REF)
2110 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2111 /* If the access is completely outside of the memcpy destination
2112 area there is no aliasing. */
2113 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2114 || lhs_offset + copy_size <= at)
2115 return NULL;
2116 /* And the access has to be contained within the memcpy destination. */
2117 if (lhs_offset > at
2118 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2119 return (void *)-1;
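/* E.g. for memcpy (&a, &b, 8): a 4-byte access at bytes [4, 8)
   of the destination is covered and will be translated; one at
   [6, 10) overlaps the tail of the copy, so we give up; one at
   [8, 12) is disjoint from the copy and we keep walking.  */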
2121 /* Make room for 2 operands in the new reference. */
2122 if (vr->operands.length () < 2)
2124 vec<vn_reference_op_s> old = vr->operands;
2125 vr->operands.safe_grow_cleared (2);
2126 if (old == shared_lookup_references
2127 && vr->operands != old)
2128 shared_lookup_references = vr->operands;
2130 else
2131 vr->operands.truncate (2);
2133 /* The looked-through reference is a simple MEM_REF. */
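/* The access sits at byte AT - LHS_OFFSET within the copied
   region, which corresponds to byte AT - LHS_OFFSET + RHS_OFFSET
   of the source; op0 and off below must encode that same byte
   offset.  */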
2134 memset (&op, 0, sizeof (op));
2135 op.type = vr->type;
2136 op.opcode = MEM_REF;
2137 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2138 op.off = at - lhs_offset + rhs_offset;
2139 vr->operands[0] = op;
2140 op.type = TREE_TYPE (rhs);
2141 op.opcode = TREE_CODE (rhs);
2142 op.op0 = rhs;
2143 op.off = -1;
2144 vr->operands[1] = op;
2145 vr->hashcode = vn_reference_compute_hash (vr);
2147 /* Adjust *ref from the new operands. */
2148 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2149 return (void *)-1;
2150 /* This can happen with bitfields. */
2151 if (ref->size != r.size)
2152 return (void *)-1;
2153 *ref = r;
2155 /* Do not update last seen VUSE after translating. */
2156 last_vuse_ptr = NULL;
2158 /* Keep looking for the adjusted *REF / VR pair. */
2159 return NULL;
2162 /* Bail out and stop walking. */
2163 return (void *)-1;
2166 /* Lookup a reference operation by its parts in the current hash table.
2167 Returns the resulting value number if it exists in the hash table,
2168 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2169 vn_reference_t stored in the hashtable if something is found. */
2171 tree
2172 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2173 vec<vn_reference_op_s> operands,
2174 vn_reference_t *vnresult, vn_lookup_kind kind)
2176 struct vn_reference_s vr1;
2177 vn_reference_t tmp;
2178 tree cst;
2180 if (!vnresult)
2181 vnresult = &tmp;
2182 *vnresult = NULL;
2184 vr1.vuse = vuse_ssa_val (vuse);
2185 shared_lookup_references.truncate (0);
2186 shared_lookup_references.safe_grow (operands.length ());
2187 memcpy (shared_lookup_references.address (),
2188 operands.address (),
2189 sizeof (vn_reference_op_s)
2190 * operands.length ());
2191 vr1.operands = operands = shared_lookup_references
2192 = valueize_refs (shared_lookup_references);
2193 vr1.type = type;
2194 vr1.set = set;
2195 vr1.hashcode = vn_reference_compute_hash (&vr1);
2196 if ((cst = fully_constant_vn_reference_p (&vr1)))
2197 return cst;
2199 vn_reference_lookup_1 (&vr1, vnresult);
2200 if (!*vnresult
2201 && kind != VN_NOWALK
2202 && vr1.vuse)
2204 ao_ref r;
2205 vn_walk_kind = kind;
2206 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2207 *vnresult =
2208 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2209 vn_reference_lookup_2,
2210 vn_reference_lookup_3,
2211 vuse_ssa_val, &vr1);
2212 gcc_checking_assert (vr1.operands == shared_lookup_references);
2215 if (*vnresult)
2216 return (*vnresult)->result;
2218 return NULL_TREE;
2221 /* Lookup OP in the current hash table, and return the resulting value
2222 number if it exists in the hash table. Return NULL_TREE if it does
2223 not exist in the hash table or if the result field of the structure
2224 was NULL. VNRESULT will be filled in with the vn_reference_t
2225 stored in the hashtable if one exists. */
2227 tree
2228 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2229 vn_reference_t *vnresult)
2231 vec<vn_reference_op_s> operands;
2232 struct vn_reference_s vr1;
2233 tree cst;
2234 bool valueized_anything;
2236 if (vnresult)
2237 *vnresult = NULL;
2239 vr1.vuse = vuse_ssa_val (vuse);
2240 vr1.operands = operands
2241 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2242 vr1.type = TREE_TYPE (op);
2243 vr1.set = get_alias_set (op);
2244 vr1.hashcode = vn_reference_compute_hash (&vr1);
2245 if ((cst = fully_constant_vn_reference_p (&vr1)))
2246 return cst;
2248 if (kind != VN_NOWALK
2249 && vr1.vuse)
2251 vn_reference_t wvnresult;
2252 ao_ref r;
2253 /* Make sure to use a valueized reference if we valueized anything.
2254 Otherwise preserve the full reference for advanced TBAA. */
2255 if (!valueized_anything
2256 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2257 vr1.operands))
2258 ao_ref_init (&r, op);
2259 vn_walk_kind = kind;
2260 wvnresult =
2261 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2262 vn_reference_lookup_2,
2263 vn_reference_lookup_3,
2264 vuse_ssa_val, &vr1);
2265 gcc_checking_assert (vr1.operands == shared_lookup_references);
2266 if (wvnresult)
2268 if (vnresult)
2269 *vnresult = wvnresult;
2270 return wvnresult->result;
2273 return NULL_TREE;
2276 return vn_reference_lookup_1 (&vr1, vnresult);
2279 /* Lookup CALL in the current hash table and return the entry in
2280 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2282 void
2283 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2284 vn_reference_t vr)
2286 if (vnresult)
2287 *vnresult = NULL;
2289 tree vuse = gimple_vuse (call);
2291 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2292 vr->operands = valueize_shared_reference_ops_from_call (call);
2293 vr->type = gimple_expr_type (call);
2294 vr->set = 0;
2295 vr->hashcode = vn_reference_compute_hash (vr);
2296 vn_reference_lookup_1 (vr, vnresult);
2299 /* Insert OP into the current hash table with a value number of
2300 RESULT, and return the resulting reference structure we created. */
2302 static vn_reference_t
2303 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2305 vn_reference_s **slot;
2306 vn_reference_t vr1;
2307 bool tem;
2309 vr1 = current_info->references_pool->allocate ();
2310 if (TREE_CODE (result) == SSA_NAME)
2311 vr1->value_id = VN_INFO (result)->value_id;
2312 else
2313 vr1->value_id = get_or_alloc_constant_value_id (result);
2314 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2315 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2316 vr1->type = TREE_TYPE (op);
2317 vr1->set = get_alias_set (op);
2318 vr1->hashcode = vn_reference_compute_hash (vr1);
2319 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2320 vr1->result_vdef = vdef;
2322 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2323 INSERT);
2325 /* Because we lookup stores using vuses, and value number failures
2326 using the vdefs (see visit_reference_op_store for how and why),
2327 it's possible that on failure we may try to insert an already
2328 inserted store. This is not wrong; there is no SSA name for a
2329 store that we could use as a differentiator anyway. Thus, unlike
2330 the other lookup functions, you cannot gcc_assert (!*slot)
2331 here. */
2333 /* But free the old slot in case of a collision. */
2334 if (*slot)
2335 free_reference (*slot);
2337 *slot = vr1;
2338 return vr1;
2341 /* Insert a reference by its pieces into the current hash table with
2342 a value number of RESULT. Return the resulting reference
2343 structure we created. */
2345 vn_reference_t
2346 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2347 vec<vn_reference_op_s> operands,
2348 tree result, unsigned int value_id)
2351 vn_reference_s **slot;
2352 vn_reference_t vr1;
2354 vr1 = current_info->references_pool->allocate ();
2355 vr1->value_id = value_id;
2356 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2357 vr1->operands = valueize_refs (operands);
2358 vr1->type = type;
2359 vr1->set = set;
2360 vr1->hashcode = vn_reference_compute_hash (vr1);
2361 if (result && TREE_CODE (result) == SSA_NAME)
2362 result = SSA_VAL (result);
2363 vr1->result = result;
2365 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2366 INSERT);
2368 /* At this point we should have all the things inserted that we have
2369 seen before, and we should never try inserting something that
2370 already exists. */
2371 gcc_assert (!*slot);
2372 if (*slot)
2373 free_reference (*slot);
2375 *slot = vr1;
2376 return vr1;
2379 /* Compute and return the hash value for nary operation VBO1. */
2381 static hashval_t
2382 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2384 inchash::hash hstate;
2385 unsigned i;
2387 for (i = 0; i < vno1->length; ++i)
2388 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2389 vno1->op[i] = SSA_VAL (vno1->op[i]);
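/* Canonicalize the operand order of commutative operations so
   that e.g. 5 + a_1 and a_1 + 5 receive the same hash and thus
   the same value number.  */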
2391 if (vno1->length == 2
2392 && commutative_tree_code (vno1->opcode)
2393 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2395 tree temp = vno1->op[0];
2396 vno1->op[0] = vno1->op[1];
2397 vno1->op[1] = temp;
2400 hstate.add_int (vno1->opcode);
2401 for (i = 0; i < vno1->length; ++i)
2402 inchash::add_expr (vno1->op[i], hstate);
2404 return hstate.end ();
2407 /* Compare nary operations VNO1 and VNO2 and return true if they are
2408 equivalent. */
2410 bool
2411 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2413 unsigned i;
2415 if (vno1->hashcode != vno2->hashcode)
2416 return false;
2418 if (vno1->length != vno2->length)
2419 return false;
2421 if (vno1->opcode != vno2->opcode
2422 || !types_compatible_p (vno1->type, vno2->type))
2423 return false;
2425 for (i = 0; i < vno1->length; ++i)
2426 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2427 return false;
2429 return true;
2432 /* Initialize VNO from the pieces provided. */
2434 static void
2435 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2436 enum tree_code code, tree type, tree *ops)
2438 vno->opcode = code;
2439 vno->length = length;
2440 vno->type = type;
2441 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2444 /* Initialize VNO from OP. */
2446 static void
2447 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2449 unsigned i;
2451 vno->opcode = TREE_CODE (op);
2452 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2453 vno->type = TREE_TYPE (op);
2454 for (i = 0; i < vno->length; ++i)
2455 vno->op[i] = TREE_OPERAND (op, i);
2458 /* Return the number of operands for a vn_nary ops structure from STMT. */
2460 static unsigned int
2461 vn_nary_length_from_stmt (gimple stmt)
2463 switch (gimple_assign_rhs_code (stmt))
2465 case REALPART_EXPR:
2466 case IMAGPART_EXPR:
2467 case VIEW_CONVERT_EXPR:
2468 return 1;
2470 case BIT_FIELD_REF:
2471 return 3;
2473 case CONSTRUCTOR:
2474 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2476 default:
2477 return gimple_num_ops (stmt) - 1;
2481 /* Initialize VNO from STMT. */
2483 static void
2484 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2486 unsigned i;
2488 vno->opcode = gimple_assign_rhs_code (stmt);
2489 vno->type = gimple_expr_type (stmt);
2490 switch (vno->opcode)
2492 case REALPART_EXPR:
2493 case IMAGPART_EXPR:
2494 case VIEW_CONVERT_EXPR:
2495 vno->length = 1;
2496 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2497 break;
2499 case BIT_FIELD_REF:
2500 vno->length = 3;
2501 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2502 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2503 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2504 break;
2506 case CONSTRUCTOR:
2507 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2508 for (i = 0; i < vno->length; ++i)
2509 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2510 break;
2512 default:
2513 gcc_checking_assert (!gimple_assign_single_p (stmt));
2514 vno->length = gimple_num_ops (stmt) - 1;
2515 for (i = 0; i < vno->length; ++i)
2516 vno->op[i] = gimple_op (stmt, i + 1);
2520 /* Compute the hashcode for VNO and look for it in the hash table;
2521 return the resulting value number if it exists in the hash table.
2522 Return NULL_TREE if it does not exist in the hash table or if the
2523 result field of the operation is NULL. VNRESULT will contain the
2524 vn_nary_op_t from the hashtable if it exists. */
2526 static tree
2527 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2529 vn_nary_op_s **slot;
2531 if (vnresult)
2532 *vnresult = NULL;
2534 vno->hashcode = vn_nary_op_compute_hash (vno);
2535 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2536 NO_INSERT);
2537 if (!slot && current_info == optimistic_info)
2538 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2539 NO_INSERT);
2540 if (!slot)
2541 return NULL_TREE;
2542 if (vnresult)
2543 *vnresult = *slot;
2544 return (*slot)->result;
2547 /* Lookup an n-ary operation by its pieces and return the resulting value
2548 number if it exists in the hash table. Return NULL_TREE if it does
2549 not exist in the hash table or if the result field of the operation
2550 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2551 if it exists. */
2553 tree
2554 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2555 tree type, tree *ops, vn_nary_op_t *vnresult)
2557 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2558 sizeof_vn_nary_op (length));
2559 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2560 return vn_nary_op_lookup_1 (vno1, vnresult);
2563 /* Lookup OP in the current hash table, and return the resulting value
2564 number if it exists in the hash table. Return NULL_TREE if it does
2565 not exist in the hash table or if the result field of the operation
2566 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2567 if it exists. */
2569 tree
2570 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2572 vn_nary_op_t vno1
2573 = XALLOCAVAR (struct vn_nary_op_s,
2574 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2575 init_vn_nary_op_from_op (vno1, op);
2576 return vn_nary_op_lookup_1 (vno1, vnresult);
2579 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2580 value number if it exists in the hash table. Return NULL_TREE if
2581 it does not exist in the hash table. VNRESULT will contain the
2582 vn_nary_op_t from the hashtable if it exists. */
2584 tree
2585 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2587 vn_nary_op_t vno1
2588 = XALLOCAVAR (struct vn_nary_op_s,
2589 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2590 init_vn_nary_op_from_stmt (vno1, stmt);
2591 return vn_nary_op_lookup_1 (vno1, vnresult);
2594 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2596 static vn_nary_op_t
2597 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2599 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2602 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2603 obstack. */
2605 static vn_nary_op_t
2606 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2608 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2609 &current_info->nary_obstack);
2611 vno1->value_id = value_id;
2612 vno1->length = length;
2613 vno1->result = result;
2615 return vno1;
2618 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2619 VNO->HASHCODE first. */
2621 static vn_nary_op_t
2622 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2623 bool compute_hash)
2625 vn_nary_op_s **slot;
2627 if (compute_hash)
2628 vno->hashcode = vn_nary_op_compute_hash (vno);
2630 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2631 gcc_assert (!*slot);
2633 *slot = vno;
2634 return vno;
2637 /* Insert an n-ary operation into the current hash table using its
2638 pieces. Return the vn_nary_op_t structure we created and put in
2639 the hashtable. */
2641 vn_nary_op_t
2642 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2643 tree type, tree *ops,
2644 tree result, unsigned int value_id)
2646 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2647 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2648 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2651 /* Insert OP into the current hash table with a value number of
2652 RESULT. Return the vn_nary_op_t structure we created and put in
2653 the hashtable. */
2655 vn_nary_op_t
2656 vn_nary_op_insert (tree op, tree result)
2658 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2659 vn_nary_op_t vno1;
2661 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2662 init_vn_nary_op_from_op (vno1, op);
2663 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2666 /* Insert the rhs of STMT into the current hash table with a value number of
2667 RESULT. */
2669 vn_nary_op_t
2670 vn_nary_op_insert_stmt (gimple stmt, tree result)
2672 vn_nary_op_t vno1
2673 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2674 result, VN_INFO (result)->value_id);
2675 init_vn_nary_op_from_stmt (vno1, stmt);
2676 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2679 /* Compute a hashcode for PHI operation VP1 and return it. */
2681 static inline hashval_t
2682 vn_phi_compute_hash (vn_phi_t vp1)
2684 inchash::hash hstate (vp1->block->index);
2685 int i;
2686 tree phi1op;
2687 tree type;
2689 /* If all PHI arguments are constants we need to distinguish
2690 the PHI node via its type. */
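/* E.g. i_1 = PHI <0, 1> of type int and l_2 = PHI <0, 1> of type
   long in the same block must not become congruent, so the type
   participates in the hash.  */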
2691 type = vp1->type;
2692 hstate.merge_hash (vn_hash_type (type));
2694 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2696 if (phi1op == VN_TOP)
2697 continue;
2698 inchash::add_expr (phi1op, hstate);
2701 return hstate.end ();
2704 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2706 static int
2707 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2709 if (vp1->hashcode != vp2->hashcode)
2710 return false;
2712 if (vp1->block == vp2->block)
2714 int i;
2715 tree phi1op;
2717 /* If the PHI nodes do not have compatible types
2718 they are not the same. */
2719 if (!types_compatible_p (vp1->type, vp2->type))
2720 return false;
2722 /* Any phi in the same block will have its arguments in the
2723 same edge order, because of how we store phi nodes. */
2724 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2726 tree phi2op = vp2->phiargs[i];
2727 if (phi1op == VN_TOP || phi2op == VN_TOP)
2728 continue;
2729 if (!expressions_equal_p (phi1op, phi2op))
2730 return false;
2732 return true;
2734 return false;
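/* E.g. x_1 = PHI <a_2(3), b_4(5)> and y_6 = PHI <a_2(3), b_4(5)>
   in the same block compare equal above and will end up with the
   same value number.  */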
2737 static vec<tree> shared_lookup_phiargs;
2739 /* Lookup PHI in the current hash table, and return the resulting
2740 value number if it exists in the hash table. Return NULL_TREE if
2741 it does not exist in the hash table. */
2743 static tree
2744 vn_phi_lookup (gimple phi)
2746 vn_phi_s **slot;
2747 struct vn_phi_s vp1;
2748 unsigned i;
2750 shared_lookup_phiargs.truncate (0);
2752 /* Canonicalize the SSA_NAMEs to their value numbers. */
2753 for (i = 0; i < gimple_phi_num_args (phi); i++)
2755 tree def = PHI_ARG_DEF (phi, i);
2756 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2757 shared_lookup_phiargs.safe_push (def);
2759 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2760 vp1.phiargs = shared_lookup_phiargs;
2761 vp1.block = gimple_bb (phi);
2762 vp1.hashcode = vn_phi_compute_hash (&vp1);
2763 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2764 NO_INSERT);
2765 if (!slot && current_info == optimistic_info)
2766 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2767 NO_INSERT);
2768 if (!slot)
2769 return NULL_TREE;
2770 return (*slot)->result;
2773 /* Insert PHI into the current hash table with a value number of
2774 RESULT. */
2776 static vn_phi_t
2777 vn_phi_insert (gimple phi, tree result)
2779 vn_phi_s **slot;
2780 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2781 unsigned i;
2782 vec<tree> args = vNULL;
2784 /* Canonicalize the SSA_NAMEs to their value numbers. */
2785 for (i = 0; i < gimple_phi_num_args (phi); i++)
2787 tree def = PHI_ARG_DEF (phi, i);
2788 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2789 args.safe_push (def);
2791 vp1->value_id = VN_INFO (result)->value_id;
2792 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2793 vp1->phiargs = args;
2794 vp1->block = gimple_bb (phi);
2795 vp1->result = result;
2796 vp1->hashcode = vn_phi_compute_hash (vp1);
2798 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2800 /* Because we iterate over phi operations more than once, it's
2801 possible the slot might already exist here, hence no assert. */
2802 *slot = vp1;
2803 return vp1;
2807 /* Print set of components in strongly connected component SCC to OUT. */
2809 static void
2810 print_scc (FILE *out, vec<tree> scc)
2812 tree var;
2813 unsigned int i;
2815 fprintf (out, "SCC consists of:");
2816 FOR_EACH_VEC_ELT (scc, i, var)
2818 fprintf (out, " ");
2819 print_generic_expr (out, var, 0);
2821 fprintf (out, "\n");
2824 /* Set the value number of FROM to TO, return true if it has changed
2825 as a result. */
2827 static inline bool
2828 set_ssa_val_to (tree from, tree to)
2830 tree currval = SSA_VAL (from);
2831 HOST_WIDE_INT toff, coff;
2833 /* The only things we allow as value numbers are ssa_names
2834 and invariants. So assert that here. We don't allow VN_TOP
2835 as visiting a stmt should produce a value-number other than
2836 that.
2837 ??? Still VN_TOP can happen for unreachable code, so force
2838 it to varying in that case. Not all code is prepared to
2839 get VN_TOP on valueization. */
2840 if (to == VN_TOP)
2842 if (dump_file && (dump_flags & TDF_DETAILS))
2843 fprintf (dump_file, "Forcing value number to varying on "
2844 "receiving VN_TOP\n");
2845 to = from;
2848 gcc_assert (to != NULL_TREE
2849 && ((TREE_CODE (to) == SSA_NAME
2850 && (to == from || SSA_VAL (to) == to))
2851 || is_gimple_min_invariant (to)));
2853 if (from != to)
2855 if (currval == from)
2857 if (dump_file && (dump_flags & TDF_DETAILS))
2859 fprintf (dump_file, "Not changing value number of ");
2860 print_generic_expr (dump_file, from, 0);
2861 fprintf (dump_file, " from VARYING to ");
2862 print_generic_expr (dump_file, to, 0);
2863 fprintf (dump_file, "\n");
2865 return false;
2867 else if (TREE_CODE (to) == SSA_NAME
2868 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2869 to = from;
2872 if (dump_file && (dump_flags & TDF_DETAILS))
2874 fprintf (dump_file, "Setting value number of ");
2875 print_generic_expr (dump_file, from, 0);
2876 fprintf (dump_file, " to ");
2877 print_generic_expr (dump_file, to, 0);
2880 if (currval != to
2881 && !operand_equal_p (currval, to, 0)
2882 /* ??? For addresses involving volatile objects or types operand_equal_p
2883 does not reliably detect ADDR_EXPRs as equal. We know we are only
2884 getting invariant gimple addresses here, so can use
2885 get_addr_base_and_unit_offset to do this comparison. */
2886 && !(TREE_CODE (currval) == ADDR_EXPR
2887 && TREE_CODE (to) == ADDR_EXPR
2888 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2889 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2890 && coff == toff))
2892 VN_INFO (from)->valnum = to;
2893 if (dump_file && (dump_flags & TDF_DETAILS))
2894 fprintf (dump_file, " (changed)\n");
2895 return true;
2897 if (dump_file && (dump_flags & TDF_DETAILS))
2898 fprintf (dump_file, "\n");
2899 return false;
2902 /* Mark as processed all the definitions in the defining stmt of USE, or
2903 the USE itself. */
2905 static void
2906 mark_use_processed (tree use)
2908 ssa_op_iter iter;
2909 def_operand_p defp;
2910 gimple stmt = SSA_NAME_DEF_STMT (use);
2912 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2914 VN_INFO (use)->use_processed = true;
2915 return;
2918 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2920 tree def = DEF_FROM_PTR (defp);
2922 VN_INFO (def)->use_processed = true;
2926 /* Value number all definitions in STMT to themselves.
2927 Return true if a value number changed. */
2929 static bool
2930 defs_to_varying (gimple stmt)
2932 bool changed = false;
2933 ssa_op_iter iter;
2934 def_operand_p defp;
2936 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2938 tree def = DEF_FROM_PTR (defp);
2939 changed |= set_ssa_val_to (def, def);
2941 return changed;
2944 static bool expr_has_constants (tree expr);
2946 /* Visit a copy between LHS and RHS, return true if the value number
2947 changed. */
2949 static bool
2950 visit_copy (tree lhs, tree rhs)
2952 /* The copy may have a more interesting constant-filled expression
2953 (we don't, since we know our RHS is just an SSA name). */
2954 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2955 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2957 /* And finally valueize. */
2958 rhs = SSA_VAL (rhs);
2960 return set_ssa_val_to (lhs, rhs);
2963 /* Visit a nary operator RHS, value number it, and return true if the
2964 value number of LHS has changed as a result. */
2966 static bool
2967 visit_nary_op (tree lhs, gimple stmt)
2969 bool changed = false;
2970 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2972 if (result)
2973 changed = set_ssa_val_to (lhs, result);
2974 else
2976 changed = set_ssa_val_to (lhs, lhs);
2977 vn_nary_op_insert_stmt (stmt, lhs);
2980 return changed;
2983 /* Visit a call STMT storing into LHS. Return true if the value number
2984 of the LHS has changed as a result. */
2986 static bool
2987 visit_reference_op_call (tree lhs, gcall *stmt)
2989 bool changed = false;
2990 struct vn_reference_s vr1;
2991 vn_reference_t vnresult = NULL;
2992 tree vdef = gimple_vdef (stmt);
2994 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2995 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2996 lhs = NULL_TREE;
2998 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2999 if (vnresult)
3001 if (vnresult->result_vdef && vdef)
3002 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3004 if (!vnresult->result && lhs)
3005 vnresult->result = lhs;
3007 if (vnresult->result && lhs)
3009 changed |= set_ssa_val_to (lhs, vnresult->result);
3011 if (VN_INFO (vnresult->result)->has_constants)
3012 VN_INFO (lhs)->has_constants = true;
3015 else
3017 vn_reference_t vr2;
3018 vn_reference_s **slot;
3019 if (vdef)
3020 changed |= set_ssa_val_to (vdef, vdef);
3021 if (lhs)
3022 changed |= set_ssa_val_to (lhs, lhs);
3023 vr2 = current_info->references_pool->allocate ();
3024 vr2->vuse = vr1.vuse;
3025 /* As we are not walking the virtual operand chain we know the
3026 shared_lookup_references are still original so we can re-use
3027 them here. */
3028 vr2->operands = vr1.operands.copy ();
3029 vr2->type = vr1.type;
3030 vr2->set = vr1.set;
3031 vr2->hashcode = vr1.hashcode;
3032 vr2->result = lhs;
3033 vr2->result_vdef = vdef;
3034 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3035 INSERT);
3036 gcc_assert (!*slot);
3037 *slot = vr2;
3040 return changed;
3043 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3044 and return true if the value number of the LHS has changed as a result. */
3046 static bool
3047 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3049 bool changed = false;
3050 tree last_vuse;
3051 tree result;
3053 last_vuse = gimple_vuse (stmt);
3054 last_vuse_ptr = &last_vuse;
3055 result = vn_reference_lookup (op, gimple_vuse (stmt),
3056 default_vn_walk_kind, NULL);
3057 last_vuse_ptr = NULL;
3059 /* We handle type-punning through unions by value-numbering based
3060 on offset and size of the access. Be prepared to handle a
3061 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
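/* Schematically, given
     union { int i; float f; } u;
     u.i = 7;
     x_1 = u.f;
   the lookup finds the stored int value, and the type mismatch is
   reconciled via a VIEW_CONVERT_EXPR (which may itself fold to a
   constant).  */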
3062 if (result
3063 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3065 /* We will be setting the value number of lhs to the value number
3066 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3067 So first simplify and lookup this expression to see if it
3068 is already available. */
3069 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3070 if ((CONVERT_EXPR_P (val)
3071 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3072 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3074 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3075 if ((CONVERT_EXPR_P (tem)
3076 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3077 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3078 TREE_TYPE (val), tem)))
3079 val = tem;
3081 result = val;
3082 if (!is_gimple_min_invariant (val)
3083 && TREE_CODE (val) != SSA_NAME)
3084 result = vn_nary_op_lookup (val, NULL);
3085 /* If the expression is not yet available, value-number lhs to
3086 a new SSA_NAME we create. */
3087 if (!result)
3089 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3090 "vntemp");
3091 /* Initialize value-number information properly. */
3092 VN_INFO_GET (result)->valnum = result;
3093 VN_INFO (result)->value_id = get_next_value_id ();
3094 VN_INFO (result)->expr = val;
3095 VN_INFO (result)->has_constants = expr_has_constants (val);
3096 VN_INFO (result)->needs_insertion = true;
3097 /* As all "inserted" statements are singleton SCCs, insert
3098 into the valid table. This is strictly needed to
3099 avoid re-generating new value SSA_NAMEs for the same
3100 expression during SCC iteration over and over (the
3101 optimistic table gets cleared after each iteration).
3102 We do not need to insert into the optimistic table, as
3103 lookups there will fall back to the valid table. */
3104 if (current_info == optimistic_info)
3106 current_info = valid_info;
3107 vn_nary_op_insert (val, result);
3108 current_info = optimistic_info;
3110 else
3111 vn_nary_op_insert (val, result);
3112 if (dump_file && (dump_flags & TDF_DETAILS))
3114 fprintf (dump_file, "Inserting name ");
3115 print_generic_expr (dump_file, result, 0);
3116 fprintf (dump_file, " for expression ");
3117 print_generic_expr (dump_file, val, 0);
3118 fprintf (dump_file, "\n");
3123 if (result)
3125 changed = set_ssa_val_to (lhs, result);
3126 if (TREE_CODE (result) == SSA_NAME
3127 && VN_INFO (result)->has_constants)
3129 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3130 VN_INFO (lhs)->has_constants = true;
3133 else
3135 changed = set_ssa_val_to (lhs, lhs);
3136 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3139 return changed;
3143 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3144 and return true if the value number of the LHS has changed as a result. */
3146 static bool
3147 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3149 bool changed = false;
3150 vn_reference_t vnresult = NULL;
3151 tree result, assign;
3152 bool resultsame = false;
3153 tree vuse = gimple_vuse (stmt);
3154 tree vdef = gimple_vdef (stmt);
3156 if (TREE_CODE (op) == SSA_NAME)
3157 op = SSA_VAL (op);
3159 /* First we want to lookup using the *vuses* from the store and see
3160 if the last store to this location with the same address had
3161 the same value.
3163 The vuses represent the memory state before the store. If the
3164 memory state, address, and value of the store are the same as the
3165 last store to this location, then this store will produce the
3166 same memory state as that store.
3168 In this case the vdef versions for this store are value numbered to those
3169 vuse versions, since they represent the same memory state after
3170 this store.
3172 Otherwise, the vdefs for the store are used when inserting into
3173 the table, since the store generates a new memory state. */
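/* Schematically, assuming nothing in between clobbers a:
     # .MEM_3 = VDEF <.MEM_1>
     a = x_2;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     a = x_2;
   the second store is redundant, so .MEM_5 is value numbered to
   .MEM_3 rather than treated as a new memory state.  */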
3175 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3177 if (result)
3179 if (TREE_CODE (result) == SSA_NAME)
3180 result = SSA_VAL (result);
3181 resultsame = expressions_equal_p (result, op);
3184 if ((!result || !resultsame)
3185 /* Only perform the following when being called from PRE
3186 which embeds tail merging. */
3187 && default_vn_walk_kind == VN_WALK)
3189 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3190 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3191 if (vnresult)
3193 VN_INFO (vdef)->use_processed = true;
3194 return set_ssa_val_to (vdef, vnresult->result_vdef);
3198 if (!result || !resultsame)
3200 if (dump_file && (dump_flags & TDF_DETAILS))
3202 fprintf (dump_file, "No store match\n");
3203 fprintf (dump_file, "Value numbering store ");
3204 print_generic_expr (dump_file, lhs, 0);
3205 fprintf (dump_file, " to ");
3206 print_generic_expr (dump_file, op, 0);
3207 fprintf (dump_file, "\n");
3209 /* Have to set value numbers before insert, since insert is
3210 going to valueize the references in-place. */
3211 if (vdef)
3213 changed |= set_ssa_val_to (vdef, vdef);
3216 /* Do not insert structure copies into the tables. */
3217 if (is_gimple_min_invariant (op)
3218 || is_gimple_reg (op))
3219 vn_reference_insert (lhs, op, vdef, NULL);
3221 /* Only perform the following when being called from PRE
3222 which embeds tail merging. */
3223 if (default_vn_walk_kind == VN_WALK)
3225 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3226 vn_reference_insert (assign, lhs, vuse, vdef);
3229 else
3231 /* We had a match, so value number the vdef to have the value
3232 number of the vuse it came from. */
3234 if (dump_file && (dump_flags & TDF_DETAILS))
3235 fprintf (dump_file, "Store matched earlier value, "
3236 "value numbering store vdefs to matching vuses.\n");
3238 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3241 return changed;
3244 /* Visit and value number PHI, return true if the value number
3245 changed. */
3247 static bool
3248 visit_phi (gimple phi)
3250 bool changed = false;
3251 tree result;
3252 tree sameval = VN_TOP;
3253 bool allsame = true;
3255 /* TODO: We could check for this in init_sccvn, and replace this
3256 with a gcc_assert. */
3257 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3258 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3260 /* See if all non-TOP arguments have the same value. TOP is
3261 equivalent to everything, so we can ignore it. */
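/* E.g. a PHI x_1 = PHI <7(2), i_3(3), 7(4)> where i_3 still
   values to VN_TOP is optimistically given the value 7;
   arguments on non-executable edges are ignored entirely.  */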
3262 edge_iterator ei;
3263 edge e;
3264 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3265 if (e->flags & EDGE_EXECUTABLE)
3267 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3269 if (TREE_CODE (def) == SSA_NAME)
3270 def = SSA_VAL (def);
3271 if (def == VN_TOP)
3272 continue;
3273 if (sameval == VN_TOP)
3275 sameval = def;
3277 else
3279 if (!expressions_equal_p (def, sameval))
3281 allsame = false;
3282 break;
3287 /* If all value numbered to the same value, the phi node has that
3288 value. */
3289 if (allsame)
3290 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3292 /* Otherwise, see if it is equivalent to a phi node in this block. */
3293 result = vn_phi_lookup (phi);
3294 if (result)
3295 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3296 else
3298 vn_phi_insert (phi, PHI_RESULT (phi));
3299 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3300 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3301 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3304 return changed;
3307 /* Return true if EXPR contains constants. */
3309 static bool
3310 expr_has_constants (tree expr)
3312 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3314 case tcc_unary:
3315 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3317 case tcc_binary:
3318 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3319 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3320 /* Constants inside reference ops are rarely interesting, but
3321 it can take a lot of looking to find them. */
3322 case tcc_reference:
3323 case tcc_declaration:
3324 return false;
3325 default:
3326 return is_gimple_min_invariant (expr);
3328 return false;
3331 /* Return true if STMT contains constants. */
3333 static bool
3334 stmt_has_constants (gimple stmt)
3336 tree tem;
3338 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3339 return false;
3341 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3343 case GIMPLE_TERNARY_RHS:
3344 tem = gimple_assign_rhs3 (stmt);
3345 if (TREE_CODE (tem) == SSA_NAME)
3346 tem = SSA_VAL (tem);
3347 if (is_gimple_min_invariant (tem))
3348 return true;
3349 /* Fallthru. */
3351 case GIMPLE_BINARY_RHS:
3352 tem = gimple_assign_rhs2 (stmt);
3353 if (TREE_CODE (tem) == SSA_NAME)
3354 tem = SSA_VAL (tem);
3355 if (is_gimple_min_invariant (tem))
3356 return true;
3357 /* Fallthru. */
3359 case GIMPLE_SINGLE_RHS:
3360 /* Constants inside reference ops are rarely interesting, but
3361 it can take a lot of looking to find them. */
3362 case GIMPLE_UNARY_RHS:
3363 tem = gimple_assign_rhs1 (stmt);
3364 if (TREE_CODE (tem) == SSA_NAME)
3365 tem = SSA_VAL (tem);
3366 return is_gimple_min_invariant (tem);
3368 default:
3369 gcc_unreachable ();
3371 return false;
3374 /* Simplify the binary expression RHS, and return the result if
3375 simplified. */
3377 static tree
3378 simplify_binary_expression (gimple stmt)
3380 tree result = NULL_TREE;
3381 tree op0 = gimple_assign_rhs1 (stmt);
3382 tree op1 = gimple_assign_rhs2 (stmt);
3383 enum tree_code code = gimple_assign_rhs_code (stmt);
3385 /* This will not catch every single case we could combine, but will
3386 catch those with constants. The goal here is to simultaneously
3387 combine constants between expressions, but avoid infinite
3388 expansion of expressions during simplification. */
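/* E.g. with x_1 = a_2 + 1 recorded, y_3 = x_1 + 2 substitutes
   a_2 + 1 for x_1 and folds to a_2 + 3, combining the constants
   without unbounded re-expansion.  */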
3389 op0 = vn_valueize (op0);
3390 if (TREE_CODE (op0) == SSA_NAME
3391 && (VN_INFO (op0)->has_constants
3392 || TREE_CODE_CLASS (code) == tcc_comparison
3393 || code == COMPLEX_EXPR))
3394 op0 = vn_get_expr_for (op0);
3396 op1 = vn_valueize (op1);
3397 if (TREE_CODE (op1) == SSA_NAME
3398 && (VN_INFO (op1)->has_constants
3399 || code == COMPLEX_EXPR))
3400 op1 = vn_get_expr_for (op1);
3402 /* Pointer plus constant can be represented as invariant address.
3403 Do so to allow further propagation, see also tree forwprop. */
3404 if (code == POINTER_PLUS_EXPR
3405 && tree_fits_uhwi_p (op1)
3406 && TREE_CODE (op0) == ADDR_EXPR
3407 && is_gimple_min_invariant (op0))
3408 return build_invariant_address (TREE_TYPE (op0),
3409 TREE_OPERAND (op0, 0),
3410 tree_to_uhwi (op1));
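/* E.g. p_1 = &a p+ 4 is returned as an invariant address of the
   form &MEM[&a + 4], which later lookups and propagation can
   match directly.  */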
3412 /* Avoid folding if nothing changed. */
3413 if (op0 == gimple_assign_rhs1 (stmt)
3414 && op1 == gimple_assign_rhs2 (stmt))
3415 return NULL_TREE;
3417 fold_defer_overflow_warnings ();
3419 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3420 if (result)
3421 STRIP_USELESS_TYPE_CONVERSION (result);
3423 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3424 stmt, 0);
3426 /* Make sure result is not a complex expression consisting
3427 of operators of operators (i.e. (a + b) + (a + c)).
3428 Otherwise, we will end up with unbounded expressions if
3429 fold does anything at all. */
3430 if (result && valid_gimple_rhs_p (result))
3431 return result;
3433 return NULL_TREE;
3436 /* Simplify the unary expression RHS, and return the result if
3437 simplified. */
3439 static tree
3440 simplify_unary_expression (gassign *stmt)
3442 tree result = NULL_TREE;
3443 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3444 enum tree_code code = gimple_assign_rhs_code (stmt);
3446 /* We handle some tcc_reference codes here that are all
3447 GIMPLE_ASSIGN_SINGLE codes. */
3448 if (code == REALPART_EXPR
3449 || code == IMAGPART_EXPR
3450 || code == VIEW_CONVERT_EXPR
3451 || code == BIT_FIELD_REF)
3452 op0 = TREE_OPERAND (op0, 0);
3454 orig_op0 = op0;
3455 op0 = vn_valueize (op0);
3456 if (TREE_CODE (op0) == SSA_NAME)
3458 if (VN_INFO (op0)->has_constants)
3459 op0 = vn_get_expr_for (op0);
3460 else if (CONVERT_EXPR_CODE_P (code)
3461 || code == REALPART_EXPR
3462 || code == IMAGPART_EXPR
3463 || code == VIEW_CONVERT_EXPR
3464 || code == BIT_FIELD_REF)
3466 /* We want to do tree-combining on conversion-like expressions.
3467 Make sure we feed only SSA_NAMEs or constants to fold though. */
3468 tree tem = vn_get_expr_for (op0);
3469 if (UNARY_CLASS_P (tem)
3470 || BINARY_CLASS_P (tem)
3471 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3472 || TREE_CODE (tem) == SSA_NAME
3473 || TREE_CODE (tem) == CONSTRUCTOR
3474 || is_gimple_min_invariant (tem))
3475 op0 = tem;
3479 /* Avoid folding if nothing changed, but remember the expression. */
3480 if (op0 == orig_op0)
3481 return NULL_TREE;
3483 if (code == BIT_FIELD_REF)
3485 tree rhs = gimple_assign_rhs1 (stmt);
3486 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3487 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3489 else
3490 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3491 if (result)
3493 STRIP_USELESS_TYPE_CONVERSION (result);
3494 if (valid_gimple_rhs_p (result))
3495 return result;
3498 return NULL_TREE;
3501 /* Try to simplify RHS using equivalences and constant folding. */
3503 static tree
3504 try_to_simplify (gassign *stmt)
3506 enum tree_code code = gimple_assign_rhs_code (stmt);
3507 tree tem;
3509 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3510 in this case, there is no point in doing extra work. */
3511 if (code == SSA_NAME)
3512 return NULL_TREE;
3514 /* First try constant folding based on our current lattice. */
3515 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3516 if (tem
3517 && (TREE_CODE (tem) == SSA_NAME
3518 || is_gimple_min_invariant (tem)))
3519 return tem;
3521 /* If that didn't work try combining multiple statements. */
3522 switch (TREE_CODE_CLASS (code))
3524 case tcc_reference:
3525 /* Fallthrough for some unary codes that can operate on registers. */
3526 if (!(code == REALPART_EXPR
3527 || code == IMAGPART_EXPR
3528 || code == VIEW_CONVERT_EXPR
3529 || code == BIT_FIELD_REF))
3530 break;
3531 /* We could do a little more with unary ops, if they expand
3532 into binary ops, but it's debatable whether it is worth it. */
3533 case tcc_unary:
3534 return simplify_unary_expression (stmt);
3536 case tcc_comparison:
3537 case tcc_binary:
3538 return simplify_binary_expression (stmt);
3540 default:
3541 break;
3544 return NULL_TREE;
3547 /* Visit and value number USE, return true if the value number
3548 changed. */
3550 static bool
3551 visit_use (tree use)
3553 bool changed = false;
3554 gimple stmt = SSA_NAME_DEF_STMT (use);
3556 mark_use_processed (use);
3558 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3559 if (dump_file && (dump_flags & TDF_DETAILS)
3560 && !SSA_NAME_IS_DEFAULT_DEF (use))
3562 fprintf (dump_file, "Value numbering ");
3563 print_generic_expr (dump_file, use, 0);
3564 fprintf (dump_file, " stmt = ");
3565 print_gimple_stmt (dump_file, stmt, 0, 0);
3568 /* Handle uninitialized uses. */
3569 if (SSA_NAME_IS_DEFAULT_DEF (use))
3570 changed = set_ssa_val_to (use, use);
3571 else
3573 if (gimple_code (stmt) == GIMPLE_PHI)
3574 changed = visit_phi (stmt);
3575 else if (gimple_has_volatile_ops (stmt))
3576 changed = defs_to_varying (stmt);
3577 else if (is_gimple_assign (stmt))
3579 enum tree_code code = gimple_assign_rhs_code (stmt);
3580 tree lhs = gimple_assign_lhs (stmt);
3581 tree rhs1 = gimple_assign_rhs1 (stmt);
3582 tree simplified;
3584 /* Shortcut for copies. Simplifying copies is pointless,
3585 since we copy the expression and value they represent. */
3586 if (code == SSA_NAME
3587 && TREE_CODE (lhs) == SSA_NAME)
3589 changed = visit_copy (lhs, rhs1);
3590 goto done;
3592 simplified = try_to_simplify (as_a <gassign *> (stmt));
3593 if (simplified)
3595 if (dump_file && (dump_flags & TDF_DETAILS))
3597 fprintf (dump_file, "RHS ");
3598 print_gimple_expr (dump_file, stmt, 0, 0);
3599 fprintf (dump_file, " simplified to ");
3600 print_generic_expr (dump_file, simplified, 0);
3601 if (TREE_CODE (lhs) == SSA_NAME)
3602 fprintf (dump_file, " has constants %d\n",
3603 expr_has_constants (simplified));
3604 else
3605 fprintf (dump_file, "\n");
3608 /* Setting value numbers to constants will occasionally
3609 screw up phi congruence because constants are not
3610 uniquely associated with a single ssa name that can be
3611 looked up. */
3612 if (simplified
3613 && is_gimple_min_invariant (simplified)
3614 && TREE_CODE (lhs) == SSA_NAME)
3616 VN_INFO (lhs)->expr = simplified;
3617 VN_INFO (lhs)->has_constants = true;
3618 changed = set_ssa_val_to (lhs, simplified);
3619 goto done;
3621 else if (simplified
3622 && TREE_CODE (simplified) == SSA_NAME
3623 && TREE_CODE (lhs) == SSA_NAME)
3625 changed = visit_copy (lhs, simplified);
3626 goto done;
3628 else if (simplified)
3630 if (TREE_CODE (lhs) == SSA_NAME)
3632 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3633 /* We have to unshare the expression or else
3634 valueizing may change the IL stream. */
3635 VN_INFO (lhs)->expr = unshare_expr (simplified);
3638 else if (stmt_has_constants (stmt)
3639 && TREE_CODE (lhs) == SSA_NAME)
3640 VN_INFO (lhs)->has_constants = true;
3641 else if (TREE_CODE (lhs) == SSA_NAME)
3643 /* We reset expr and constantness here because we may
3644 have been value numbering optimistically, and
3645 iterating. They may become non-constant in this case,
3646 even if they were optimistically constant. */
3648 VN_INFO (lhs)->has_constants = false;
3649 VN_INFO (lhs)->expr = NULL_TREE;
3652 if ((TREE_CODE (lhs) == SSA_NAME
3653 /* We can substitute SSA_NAMEs that are live over
3654 abnormal edges with their constant value. */
3655 && !(gimple_assign_copy_p (stmt)
3656 && is_gimple_min_invariant (rhs1))
3657 && !(simplified
3658 && is_gimple_min_invariant (simplified))
3659 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3660 /* Stores or copies from SSA_NAMEs that are live over
3661 abnormal edges are a problem. */
3662 || (code == SSA_NAME
3663 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3664 changed = defs_to_varying (stmt);
3665 else if (REFERENCE_CLASS_P (lhs)
3666 || DECL_P (lhs))
3667 changed = visit_reference_op_store (lhs, rhs1, stmt);
3668 else if (TREE_CODE (lhs) == SSA_NAME)
3670 if ((gimple_assign_copy_p (stmt)
3671 && is_gimple_min_invariant (rhs1))
3672 || (simplified
3673 && is_gimple_min_invariant (simplified)))
3675 VN_INFO (lhs)->has_constants = true;
3676 if (simplified)
3677 changed = set_ssa_val_to (lhs, simplified);
3678 else
3679 changed = set_ssa_val_to (lhs, rhs1);
3681 else
3683 /* First try to lookup the simplified expression. */
3684 if (simplified)
3686 enum gimple_rhs_class rhs_class;
3689 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3690 if ((rhs_class == GIMPLE_UNARY_RHS
3691 || rhs_class == GIMPLE_BINARY_RHS
3692 || rhs_class == GIMPLE_TERNARY_RHS)
3693 && valid_gimple_rhs_p (simplified))
3695 tree result = vn_nary_op_lookup (simplified, NULL);
3696 if (result)
3698 changed = set_ssa_val_to (lhs, result);
3699 goto done;
3704 /* Otherwise visit the original statement. */
3705 switch (vn_get_stmt_kind (stmt))
3707 case VN_NARY:
3708 changed = visit_nary_op (lhs, stmt);
3709 break;
3710 case VN_REFERENCE:
3711 changed = visit_reference_op_load (lhs, rhs1, stmt);
3712 break;
3713 default:
3714 changed = defs_to_varying (stmt);
3715 break;
3719 else
3720 changed = defs_to_varying (stmt);
3722 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3724 tree lhs = gimple_call_lhs (stmt);
3725 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3727 /* Try constant folding based on our current lattice. */
3728 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3729 vn_valueize);
3730 if (simplified)
3732 if (dump_file && (dump_flags & TDF_DETAILS))
3734 fprintf (dump_file, "call ");
3735 print_gimple_expr (dump_file, stmt, 0, 0);
3736 fprintf (dump_file, " simplified to ");
3737 print_generic_expr (dump_file, simplified, 0);
3738 if (TREE_CODE (lhs) == SSA_NAME)
3739 fprintf (dump_file, " has constants %d\n",
3740 expr_has_constants (simplified));
3741 else
3742 fprintf (dump_file, "\n");
3745 /* Setting value numbers to constants will occasionally
3746 screw up phi congruence because constants are not
3747 uniquely associated with a single ssa name that can be
3748 looked up. */
3749 if (simplified
3750 && is_gimple_min_invariant (simplified))
3752 VN_INFO (lhs)->expr = simplified;
3753 VN_INFO (lhs)->has_constants = true;
3754 changed = set_ssa_val_to (lhs, simplified);
3755 if (gimple_vdef (stmt))
3756 changed |= set_ssa_val_to (gimple_vdef (stmt),
3757 SSA_VAL (gimple_vuse (stmt)));
3758 goto done;
3760 else if (simplified
3761 && TREE_CODE (simplified) == SSA_NAME)
3763 changed = visit_copy (lhs, simplified);
3764 if (gimple_vdef (stmt))
3765 changed |= set_ssa_val_to (gimple_vdef (stmt),
3766 SSA_VAL (gimple_vuse (stmt)));
3767 goto done;
3769 else
3771 if (stmt_has_constants (stmt))
3772 VN_INFO (lhs)->has_constants = true;
3773 else
3775 /* We reset expr and constantness here because we may
3776 have been value numbering optimistically, and
3777 iterating. They may become non-constant in this case,
3778 even if they were optimistically constant. */
3779 VN_INFO (lhs)->has_constants = false;
3780 VN_INFO (lhs)->expr = NULL_TREE;
3783 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3785 changed = defs_to_varying (stmt);
3786 goto done;
3791 if (!gimple_call_internal_p (stmt)
3792 && (/* Calls to the same function with the same vuse
3793 and the same operands do not necessarily return the same
3794 value, unless they're pure or const. */
3795 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3796 /* If calls have a vdef, subsequent calls won't have
3797 the same incoming vuse. So, if 2 calls with vdef have the
3798 same vuse, we know they're not subsequent.
3799 Two such non-subsequent calls to the same function with the
3800 same vuse and the same operands can therefore be value numbered
3801 the same, because there is no code in the program that could
3802 compare the 2 values... */
3803 || (gimple_vdef (stmt)
3804 /* ... unless the call returns a pointer which does
3805 not alias with anything else. In which case the
3806 information that the values are distinct are encoded
3807 in the IL. */
3808 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3809 /* Only perform the following when being called from PRE
3810 which embeds tail merging. */
3811 && default_vn_walk_kind == VN_WALK)))
3812 changed = visit_reference_op_call (lhs, call_stmt);
3813 else
3814 changed = defs_to_varying (stmt);
3816 else
3817 changed = defs_to_varying (stmt);
3819 done:
3820 return changed;
3823 /* Compare two operands by reverse postorder index. */
3825 static int
3826 compare_ops (const void *pa, const void *pb)
3828 const tree opa = *((const tree *)pa);
3829 const tree opb = *((const tree *)pb);
3830 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3831 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3832 basic_block bba;
3833 basic_block bbb;
3835 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3836 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3837 else if (gimple_nop_p (opstmta))
3838 return -1;
3839 else if (gimple_nop_p (opstmtb))
3840 return 1;
3842 bba = gimple_bb (opstmta);
3843 bbb = gimple_bb (opstmtb);
3845 if (!bba && !bbb)
3846 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3847 else if (!bba)
3848 return -1;
3849 else if (!bbb)
3850 return 1;
3852 if (bba == bbb)
3854 if (gimple_code (opstmta) == GIMPLE_PHI
3855 && gimple_code (opstmtb) == GIMPLE_PHI)
3856 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3857 else if (gimple_code (opstmta) == GIMPLE_PHI)
3858 return -1;
3859 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3860 return 1;
3861 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3862 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3863 else
3864 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3866 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3869 /* Sort an array containing members of a strongly connected component
3870 SCC so that the members are ordered by RPO number.
3871 This means that when the sort is complete, iterating through the
3872 array will give you the members in RPO order. */
3874 static void
3875 sort_scc (vec<tree> scc)
3877 scc.qsort (compare_ops);
3880 /* Insert the no longer used nary ONARY into the hash INFO. */
3882 static void
3883 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3885 size_t size = sizeof_vn_nary_op (onary->length);
3886 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3887 &info->nary_obstack);
3888 memcpy (nary, onary, size);
3889 vn_nary_op_insert_into (nary, info->nary, false);
3892 /* Insert the no longer used phi OPHI into the hash INFO. */
3894 static void
3895 copy_phi (vn_phi_t ophi, vn_tables_t info)
3897 vn_phi_t phi = info->phis_pool->allocate ();
3898 vn_phi_s **slot;
3899 memcpy (phi, ophi, sizeof (*phi));
3900 ophi->phiargs.create (0);
3901 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3902 gcc_assert (!*slot);
3903 *slot = phi;
3906 /* Insert the no longer used reference OREF into the hash INFO. */
3908 static void
3909 copy_reference (vn_reference_t oref, vn_tables_t info)
3911 vn_reference_t ref;
3912 vn_reference_s **slot;
3913 ref = info->references_pool->allocate ();
3914 memcpy (ref, oref, sizeof (*ref));
3915 oref->operands.create (0);
3916 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3917 if (*slot)
3918 free_reference (*slot);
3919 *slot = ref;
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
        changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
                               ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
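/* Editor's illustration with hypothetical GIMPLE: for the classic
   self-referential cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   the first optimistic iteration simplifies i_2 to i_1, which lets the
   PHI collapse to the constant 0; the next iteration re-visits the SCC
   with that knowledge and nothing changes, so the while loop above
   terminates with i_1 == i_2 == 0 recorded in the table.  */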
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we ran into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", scc.length (),
                 (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}
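/* Editorial note: PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE) corresponds to
   the user-visible knob --param sccvn-max-scc-size, so the bail-out above
   can be adjusted from the command line, e.g. (illustrative invocation):

     gcc -O2 --param sccvn-max-scc-size=100000 big.c

   When the limit triggers, the false return value propagates up through
   DFS and run_scc_vn and the whole SCCVN run is abandoned, not just the
   offending SCC.  */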
/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                namevec.release ();
                itervec.release ();
                return false;
              }

          /* Check if we are done.  */
          if (namevec.is_empty ())
            {
              namevec.release ();
              itervec.release ();
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = namevec.pop ();
          memcpy (&iter, &itervec.last (),
                  sizeof (ssa_op_iter));
          itervec.pop ();
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              itervec.safe_push (iter);
              namevec.safe_push (name);
              name = use;
              goto start_over;
            }

continue_walking:
          VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                     VN_INFO (use)->low);

          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
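/* Editor's sketch, not part of GCC: the loop above is an iterative
   rendering of Tarjan's SCC algorithm, with namevec/itervec standing in
   for the recursion stack (SSA use-def chains can be deep enough to
   overflow the real one).  The textbook recursive form, for a generic
   graph with hypothetical succ[]/nsucc[] adjacency data and
   zero-initialized bookkeeping arrays, would be:

     void tarjan (int v)
     {
       dfsnum[v] = low[v] = next_dfs_num++;
       stack[sp++] = v;
       on_stack[v] = true;
       for (int i = 0; i < nsucc[v]; i++)
         {
           int w = succ[v][i];
           if (!dfsnum[w])              // unvisited: recurse
             {
               tarjan (w);
               low[v] = MIN (low[v], low[w]);
             }
           else if (on_stack[w])        // back- or cross-edge into the stack
             low[v] = MIN (low[v], dfsnum[w]);
         }
       if (low[v] == dfsnum[v])         // v is the root of an SCC
         do
           on_stack[stack[--sp]] = false;  // pop one SCC member
         while (stack[sp] != v);
     }
*/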
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
  table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
                                                               30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}
/* Set up the data structures used by a run of SCCVN: dominance info,
   the SCC stack, the hash tables and the RPO numbering of blocks.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* vec::create doesn't actually grow the vector to the right size, it
     just preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp maps RPO position to block index: the i'th element
     is the index of the i'th block in RPO order.  We want the inverse
     mapping, from block index to RPO number, so rearrange it into
     rpo_numbers.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
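  /* Editorial example with hypothetical block numbers: if
     pre_and_rev_post_order_compute produced

       rpo_numbers_temp[] = { 2, 5, 3, 4 }     (RPO order: bb2 bb5 bb3 bb4)

     the inversion above fills in rpo_numbers[2] = 0, rpo_numbers[5] = 1,
     rpo_numbers[3] = 2 and rpo_numbers[4] = 3, i.e. rpo_numbers[bb->index]
     answers "how early in RPO is this block", which is what compare_ops
     consumes.  */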
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
/* Free the data structures allocated for SCC value numbering.  */

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};
void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable, consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable, all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Marking all outgoing edges of unreachable "
                 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
        && !DFS (op))
      {
        fail = true;
        return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        /* Work hard in computing the condition and take into account
           the valueization of the defining stmt.  */
        if (TREE_CODE (lhs) == SSA_NAME)
          lhs = vn_get_expr_for (lhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          rhs = vn_get_expr_for (rhs);
        val = fold_binary (gimple_cond_code (stmt),
                           boolean_type_node, lhs, rhs);
        break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
             "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
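/* Editor's illustration with hypothetical GIMPLE: given

     if (x_1 != 0)      // where value numbering derived x_1 == 0
       goto bb3;
     else
       goto bb4;

   vn_get_expr_for replaces x_1 by its valueized expression, fold_binary
   folds the condition to boolean_false_node, find_taken_edge picks the
   edge to bb4, and the edge to bb3 loses EDGE_EXECUTABLE; blocks only
   reachable through it are then treated as unreachable by the rest of
   the walk.  */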
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        {
          VN_INFO (def)->visited = true;
          VN_INFO (def)->valnum = def;
        }
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last stmt's
     SSA uses and deciding which outgoing edges are not executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids: names that are their own value leader or
     still VN_TOP get a fresh id, invariants share per-constant ids.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids from the value leaders.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
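/* Editorial usage sketch: in this tree the callers live in
   tree-ssa-pre.c; FRE-style consumers invoke run_scc_vn (VN_WALKREWRITE)
   while PRE uses run_scc_vn (VN_WALK), the mode the call-handling code in
   visit_use checks for.  A caller is expected to bail out on failure and
   to pair the run with free_scc_vn, roughly:

     if (!run_scc_vn (VN_WALK))
       return 0;                // resource limits hit, punt
     ... consume SSA_VAL () and the value ids ...
     free_scc_vn ();
*/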
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;