* tree-ssa-sccvn.c (vn_reference_lookup_3): Do not access pieces of
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blobcab5848ab996f7ffbd693d853e78c2847263b8eb
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "input.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "stor-layout.h"
31 #include "predict.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "dominance.h"
35 #include "cfg.h"
36 #include "cfganal.h"
37 #include "basic-block.h"
38 #include "gimple-pretty-print.h"
39 #include "tree-inline.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "rtl.h"
54 #include "flags.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "varasm.h"
62 #include "stmt.h"
63 #include "expr.h"
64 #include "tree-dfa.h"
65 #include "tree-ssa.h"
66 #include "dumpfile.h"
67 #include "alloc-pool.h"
68 #include "cfgloop.h"
69 #include "params.h"
70 #include "tree-ssa-propagate.h"
71 #include "tree-ssa-sccvn.h"
72 #include "tree-cfg.h"
73 #include "domwalk.h"
74 #include "ipa-ref.h"
75 #include "plugin-api.h"
76 #include "cgraph.h"
78 /* This algorithm is based on the SCC algorithm presented by Keith
79 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
80 (http://citeseer.ist.psu.edu/41805.html). In
81 straight line code, it is equivalent to a regular hash based value
82 numbering that is performed in reverse postorder.
84 For code with cycles, there are two alternatives, both of which
85 require keeping the hashtables separate from the actual list of
86 value numbers for SSA names.
88 1. Iterate value numbering in an RPO walk of the blocks, removing
89 all the entries from the hashtable after each iteration (but
90 keeping the SSA name->value number mapping between iterations).
91 Iterate until it does not change.
93 2. Perform value numbering as part of an SCC walk on the SSA graph,
94 iterating only the cycles in the SSA graph until they do not change
95 (using a separate, optimistic hashtable for value numbering the SCC
96 operands).
98 The second is not just faster in practice (because most SSA graph
99 cycles do not involve all the variables in the graph), it also has
100 some nice properties.
102 One of these nice properties is that when we pop an SCC off the
103 stack, we are guaranteed to have processed all the operands coming from
104 *outside of that SCC*, so we do not need to do anything special to
105 ensure they have value numbers.
107 Another nice property is that the SCC walk is done as part of a DFS
108 of the SSA graph, which makes it easy to perform combining and
109 simplifying operations at the same time.
111 The code below is deliberately written in a way that makes it easy
112 to separate the SCC walk from the other work it does.
114 In order to propagate constants through the code, we track which
115 expressions contain constants, and use those while folding. In
116 theory, we could also track expressions whose value numbers are
117 replaced, in case we end up folding based on expression
118 identities.
120 In order to value number memory, we assign value numbers to vuses.
121 This enables us to note that, for example, stores to the same
122 address of the same value from the same starting memory states are
123 equivalent.
124 TODO:
126 1. We can iterate only the changing portions of the SCC's, but
127 I have not seen an SCC big enough for this to be a win.
128 2. If you differentiate between phi nodes for loops and phi nodes
129 for if-then-else, you can properly consider phi nodes in different
130 blocks for equivalence.
131 3. We could value number vuses in more cases, particularly, whole
132 structure copies.
136 static tree *last_vuse_ptr;
137 static vn_lookup_kind vn_walk_kind;
138 static vn_lookup_kind default_vn_walk_kind;
140 /* vn_nary_op hashtable helpers. */
142 struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
144 typedef vn_nary_op_s *value_type;
145 typedef vn_nary_op_s *compare_type;
146 static inline hashval_t hash (const vn_nary_op_s *);
147 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
150 /* Return the computed hashcode for nary operation P1. */
152 inline hashval_t
153 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
155 return vno1->hashcode;
158 /* Compare nary operations P1 and P2 and return true if they are
159 equivalent. */
161 inline bool
162 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
164 return vn_nary_op_eq (vno1, vno2);
167 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
168 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
171 /* vn_phi hashtable helpers. */
173 static int
174 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
176 struct vn_phi_hasher
178 typedef vn_phi_s *value_type;
179 typedef vn_phi_s *compare_type;
180 static inline hashval_t hash (const vn_phi_s *);
181 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
182 static inline void remove (vn_phi_s *);
185 /* Return the computed hashcode for phi operation P1. */
187 inline hashval_t
188 vn_phi_hasher::hash (const vn_phi_s *vp1)
190 return vp1->hashcode;
193 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
195 inline bool
196 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
198 return vn_phi_eq (vp1, vp2);
201 /* Free a phi operation structure VP. */
203 inline void
204 vn_phi_hasher::remove (vn_phi_s *phi)
206 phi->phiargs.release ();
209 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
210 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
213 /* Compare two reference operands P1 and P2 for equality. Return true if
214 they are equal, and false otherwise. */
216 static int
217 vn_reference_op_eq (const void *p1, const void *p2)
219 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
220 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
222 return (vro1->opcode == vro2->opcode
223 /* We do not care for differences in type qualification. */
224 && (vro1->type == vro2->type
225 || (vro1->type && vro2->type
226 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
227 TYPE_MAIN_VARIANT (vro2->type))))
228 && expressions_equal_p (vro1->op0, vro2->op0)
229 && expressions_equal_p (vro1->op1, vro2->op1)
230 && expressions_equal_p (vro1->op2, vro2->op2));
233 /* Free a reference operation structure VP. */
235 static inline void
236 free_reference (vn_reference_s *vr)
238 vr->operands.release ();
242 /* vn_reference hashtable helpers. */
244 struct vn_reference_hasher
246 typedef vn_reference_s *value_type;
247 typedef vn_reference_s *compare_type;
248 static inline hashval_t hash (const vn_reference_s *);
249 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
250 static inline void remove (vn_reference_s *);
253 /* Return the hashcode for a given reference operation P1. */
255 inline hashval_t
256 vn_reference_hasher::hash (const vn_reference_s *vr1)
258 return vr1->hashcode;
261 inline bool
262 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
264 return vn_reference_eq (v, c);
267 inline void
268 vn_reference_hasher::remove (vn_reference_s *v)
270 free_reference (v);
273 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
274 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
277 /* The set of hashtables and alloc_pool's for their items. */
279 typedef struct vn_tables_s
281 vn_nary_op_table_type *nary;
282 vn_phi_table_type *phis;
283 vn_reference_table_type *references;
284 struct obstack nary_obstack;
285 pool_allocator<vn_phi_s> *phis_pool;
286 pool_allocator<vn_reference_s> *references_pool;
287 } *vn_tables_t;
290 /* vn_constant hashtable helpers. */
292 struct vn_constant_hasher : typed_free_remove <vn_constant_s>
294 typedef vn_constant_s *value_type;
295 typedef vn_constant_s *compare_type;
296 static inline hashval_t hash (const vn_constant_s *);
297 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
300 /* Hash table hash function for vn_constant_t. */
302 inline hashval_t
303 vn_constant_hasher::hash (const vn_constant_s *vc1)
305 return vc1->hashcode;
308 /* Hash table equality function for vn_constant_t. */
310 inline bool
311 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
313 if (vc1->hashcode != vc2->hashcode)
314 return false;
316 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
319 static hash_table<vn_constant_hasher> *constant_to_value_id;
320 static bitmap constant_value_ids;
323 /* Valid hashtables storing information we have proven to be
324 correct. */
326 static vn_tables_t valid_info;
328 /* Optimistic hashtables storing information we are making assumptions about
329 during iterations. */
331 static vn_tables_t optimistic_info;
333 /* Pointer to the set of hashtables that is currently being used.
334 Should always point to either the optimistic_info, or the
335 valid_info. */
337 static vn_tables_t current_info;
340 /* Reverse post order index for each basic block. */
342 static int *rpo_numbers;
344 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
346 /* Return the SSA value of the VUSE x, supporting released VDEFs
347 during elimination which will value-number the VDEF to the
348 associated VUSE (but not substitute in the whole lattice). */
350 static inline tree
351 vuse_ssa_val (tree x)
353 if (!x)
354 return NULL_TREE;
358 x = SSA_VAL (x);
360 while (SSA_NAME_IN_FREE_LIST (x));
362 return x;
365 /* This represents the top of the VN lattice, which is the universal
366 value. */
368 tree VN_TOP;
370 /* Unique counter for our value ids. */
372 static unsigned int next_value_id;
374 /* Next DFS number and the stack for strongly connected component
375 detection. */
377 static unsigned int next_dfs_num;
378 static vec<tree> sccstack;
382 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
383 are allocated on an obstack for locality reasons, and to free them
384 without looping over the vec. */
386 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
387 static struct obstack vn_ssa_aux_obstack;
389 /* Return the value numbering information for a given SSA name. */
391 vn_ssa_aux_t
392 VN_INFO (tree name)
394 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
395 gcc_checking_assert (res);
396 return res;
399 /* Set the value numbering info for a given SSA name to a given
400 value. */
402 static inline void
403 VN_INFO_SET (tree name, vn_ssa_aux_t value)
405 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
408 /* Initialize the value numbering info for a given SSA name.
409 This should be called just once for every SSA name. */
411 vn_ssa_aux_t
412 VN_INFO_GET (tree name)
414 vn_ssa_aux_t newinfo;
416 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
417 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
418 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
419 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
420 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
421 return newinfo;
425 /* Get the representative expression for the SSA_NAME NAME. Returns
426 the representative SSA_NAME if there is no expression associated with it. */
428 tree
429 vn_get_expr_for (tree name)
431 vn_ssa_aux_t vn = VN_INFO (name);
432 gimple def_stmt;
433 tree expr = NULL_TREE;
434 enum tree_code code;
436 if (vn->valnum == VN_TOP)
437 return name;
439 /* If the value-number is a constant it is the representative
440 expression. */
441 if (TREE_CODE (vn->valnum) != SSA_NAME)
442 return vn->valnum;
444 /* Get to the information of the value of this SSA_NAME. */
445 vn = VN_INFO (vn->valnum);
447 /* If the value-number is a constant it is the representative
448 expression. */
449 if (TREE_CODE (vn->valnum) != SSA_NAME)
450 return vn->valnum;
452 /* Else if we have an expression, return it. */
453 if (vn->expr != NULL_TREE)
454 return vn->expr;
456 /* Otherwise use the defining statement to build the expression. */
457 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
459 /* If the value number is not an assignment use it directly. */
460 if (!is_gimple_assign (def_stmt))
461 return vn->valnum;
463 /* Note that we can valueize here because we clear the cached
464 simplified expressions after each optimistic iteration. */
465 code = gimple_assign_rhs_code (def_stmt);
466 switch (TREE_CODE_CLASS (code))
468 case tcc_reference:
469 if ((code == REALPART_EXPR
470 || code == IMAGPART_EXPR
471 || code == VIEW_CONVERT_EXPR)
472 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
473 0)) == SSA_NAME)
474 expr = fold_build1 (code,
475 gimple_expr_type (def_stmt),
476 vn_valueize (TREE_OPERAND
477 (gimple_assign_rhs1 (def_stmt), 0)));
478 break;
480 case tcc_unary:
481 expr = fold_build1 (code,
482 gimple_expr_type (def_stmt),
483 vn_valueize (gimple_assign_rhs1 (def_stmt)));
484 break;
486 case tcc_binary:
487 expr = fold_build2 (code,
488 gimple_expr_type (def_stmt),
489 vn_valueize (gimple_assign_rhs1 (def_stmt)),
490 vn_valueize (gimple_assign_rhs2 (def_stmt)));
491 break;
493 case tcc_exceptional:
494 if (code == CONSTRUCTOR
495 && TREE_CODE
496 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
497 expr = gimple_assign_rhs1 (def_stmt);
498 break;
500 default:;
502 if (expr == NULL_TREE)
503 return vn->valnum;
505 /* Cache the expression. */
506 vn->expr = expr;
508 return expr;
511 /* Return the vn_kind the expression computed by the stmt should be
512 associated with. */
514 enum vn_kind
515 vn_get_stmt_kind (gimple stmt)
517 switch (gimple_code (stmt))
519 case GIMPLE_CALL:
520 return VN_REFERENCE;
521 case GIMPLE_PHI:
522 return VN_PHI;
523 case GIMPLE_ASSIGN:
525 enum tree_code code = gimple_assign_rhs_code (stmt);
526 tree rhs1 = gimple_assign_rhs1 (stmt);
527 switch (get_gimple_rhs_class (code))
529 case GIMPLE_UNARY_RHS:
530 case GIMPLE_BINARY_RHS:
531 case GIMPLE_TERNARY_RHS:
532 return VN_NARY;
533 case GIMPLE_SINGLE_RHS:
534 switch (TREE_CODE_CLASS (code))
536 case tcc_reference:
537 /* VOP-less references can go through unary case. */
538 if ((code == REALPART_EXPR
539 || code == IMAGPART_EXPR
540 || code == VIEW_CONVERT_EXPR
541 || code == BIT_FIELD_REF)
542 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
543 return VN_NARY;
545 /* Fallthrough. */
546 case tcc_declaration:
547 return VN_REFERENCE;
549 case tcc_constant:
550 return VN_CONSTANT;
552 default:
553 if (code == ADDR_EXPR)
554 return (is_gimple_min_invariant (rhs1)
555 ? VN_CONSTANT : VN_REFERENCE);
556 else if (code == CONSTRUCTOR)
557 return VN_NARY;
558 return VN_NONE;
560 default:
561 return VN_NONE;
564 default:
565 return VN_NONE;
569 /* Lookup a value id for CONSTANT and return it. If it does not
570 exist returns 0. */
572 unsigned int
573 get_constant_value_id (tree constant)
575 vn_constant_s **slot;
576 struct vn_constant_s vc;
578 vc.hashcode = vn_hash_constant_with_type (constant);
579 vc.constant = constant;
580 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
581 if (slot)
582 return (*slot)->value_id;
583 return 0;
586 /* Lookup a value id for CONSTANT, and if it does not exist, create a
587 new one and return it. If it does exist, return it. */
589 unsigned int
590 get_or_alloc_constant_value_id (tree constant)
592 vn_constant_s **slot;
593 struct vn_constant_s vc;
594 vn_constant_t vcp;
596 vc.hashcode = vn_hash_constant_with_type (constant);
597 vc.constant = constant;
598 slot = constant_to_value_id->find_slot (&vc, INSERT);
599 if (*slot)
600 return (*slot)->value_id;
602 vcp = XNEW (struct vn_constant_s);
603 vcp->hashcode = vc.hashcode;
604 vcp->constant = constant;
605 vcp->value_id = get_next_value_id ();
606 *slot = vcp;
607 bitmap_set_bit (constant_value_ids, vcp->value_id);
608 return vcp->value_id;
611 /* Return true if V is a value id for a constant. */
613 bool
614 value_id_constant_p (unsigned int v)
616 return bitmap_bit_p (constant_value_ids, v);
619 /* Compute the hash for a reference operand VRO1. */
621 static void
622 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
624 hstate.add_int (vro1->opcode);
625 if (vro1->op0)
626 inchash::add_expr (vro1->op0, hstate);
627 if (vro1->op1)
628 inchash::add_expr (vro1->op1, hstate);
629 if (vro1->op2)
630 inchash::add_expr (vro1->op2, hstate);
633 /* Compute a hash for the reference operation VR1 and return it. */
635 static hashval_t
636 vn_reference_compute_hash (const vn_reference_t vr1)
638 inchash::hash hstate;
639 hashval_t result;
640 int i;
641 vn_reference_op_t vro;
642 HOST_WIDE_INT off = -1;
643 bool deref = false;
645 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
647 if (vro->opcode == MEM_REF)
648 deref = true;
649 else if (vro->opcode != ADDR_EXPR)
650 deref = false;
651 if (vro->off != -1)
653 if (off == -1)
654 off = 0;
655 off += vro->off;
657 else
659 if (off != -1
660 && off != 0)
661 hstate.add_int (off);
662 off = -1;
663 if (deref
664 && vro->opcode == ADDR_EXPR)
666 if (vro->op0)
668 tree op = TREE_OPERAND (vro->op0, 0);
669 hstate.add_int (TREE_CODE (op));
670 inchash::add_expr (op, hstate);
673 else
674 vn_reference_op_compute_hash (vro, hstate);
677 result = hstate.end ();
678 /* ??? We would ICE later if we hash instead of adding that in. */
679 if (vr1->vuse)
680 result += SSA_NAME_VERSION (vr1->vuse);
682 return result;
685 /* Return true if reference operations VR1 and VR2 are equivalent. This
686 means they have the same set of operands and vuses. */
688 bool
689 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
691 unsigned i, j;
693 /* Early out if this is not a hash collision. */
694 if (vr1->hashcode != vr2->hashcode)
695 return false;
697 /* The VOP needs to be the same. */
698 if (vr1->vuse != vr2->vuse)
699 return false;
701 /* If the operands are the same we are done. */
702 if (vr1->operands == vr2->operands)
703 return true;
705 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
706 return false;
708 if (INTEGRAL_TYPE_P (vr1->type)
709 && INTEGRAL_TYPE_P (vr2->type))
711 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
712 return false;
714 else if (INTEGRAL_TYPE_P (vr1->type)
715 && (TYPE_PRECISION (vr1->type)
716 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
717 return false;
718 else if (INTEGRAL_TYPE_P (vr2->type)
719 && (TYPE_PRECISION (vr2->type)
720 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
721 return false;
723 i = 0;
724 j = 0;
727 HOST_WIDE_INT off1 = 0, off2 = 0;
728 vn_reference_op_t vro1, vro2;
729 vn_reference_op_s tem1, tem2;
730 bool deref1 = false, deref2 = false;
731 for (; vr1->operands.iterate (i, &vro1); i++)
733 if (vro1->opcode == MEM_REF)
734 deref1 = true;
735 /* Do not look through a storage order barrier. */
736 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
737 return false;
738 if (vro1->off == -1)
739 break;
740 off1 += vro1->off;
742 for (; vr2->operands.iterate (j, &vro2); j++)
744 if (vro2->opcode == MEM_REF)
745 deref2 = true;
746 /* Do not look through a storage order barrier. */
747 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
748 return false;
749 if (vro2->off == -1)
750 break;
751 off2 += vro2->off;
753 if (off1 != off2)
754 return false;
755 if (deref1 && vro1->opcode == ADDR_EXPR)
757 memset (&tem1, 0, sizeof (tem1));
758 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
759 tem1.type = TREE_TYPE (tem1.op0);
760 tem1.opcode = TREE_CODE (tem1.op0);
761 vro1 = &tem1;
762 deref1 = false;
764 if (deref2 && vro2->opcode == ADDR_EXPR)
766 memset (&tem2, 0, sizeof (tem2));
767 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
768 tem2.type = TREE_TYPE (tem2.op0);
769 tem2.opcode = TREE_CODE (tem2.op0);
770 vro2 = &tem2;
771 deref2 = false;
773 if (deref1 != deref2)
774 return false;
775 if (!vn_reference_op_eq (vro1, vro2))
776 return false;
777 ++j;
778 ++i;
780 while (vr1->operands.length () != i
781 || vr2->operands.length () != j);
783 return true;
786 /* Copy the operations present in load/store REF into RESULT, a vector of
787 vn_reference_op_s's. */
789 static void
790 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
792 if (TREE_CODE (ref) == TARGET_MEM_REF)
794 vn_reference_op_s temp;
796 result->reserve (3);
798 memset (&temp, 0, sizeof (temp));
799 temp.type = TREE_TYPE (ref);
800 temp.opcode = TREE_CODE (ref);
801 temp.op0 = TMR_INDEX (ref);
802 temp.op1 = TMR_STEP (ref);
803 temp.op2 = TMR_OFFSET (ref);
804 temp.off = -1;
805 result->quick_push (temp);
807 memset (&temp, 0, sizeof (temp));
808 temp.type = NULL_TREE;
809 temp.opcode = ERROR_MARK;
810 temp.op0 = TMR_INDEX2 (ref);
811 temp.off = -1;
812 result->quick_push (temp);
814 memset (&temp, 0, sizeof (temp));
815 temp.type = NULL_TREE;
816 temp.opcode = TREE_CODE (TMR_BASE (ref));
817 temp.op0 = TMR_BASE (ref);
818 temp.off = -1;
819 result->quick_push (temp);
820 return;
823 /* For non-calls, store the information that makes up the address. */
824 tree orig = ref;
825 while (ref)
827 vn_reference_op_s temp;
829 memset (&temp, 0, sizeof (temp));
830 temp.type = TREE_TYPE (ref);
831 temp.opcode = TREE_CODE (ref);
832 temp.off = -1;
834 switch (temp.opcode)
836 case MODIFY_EXPR:
837 temp.op0 = TREE_OPERAND (ref, 1);
838 break;
839 case WITH_SIZE_EXPR:
840 temp.op0 = TREE_OPERAND (ref, 1);
841 temp.off = 0;
842 break;
843 case MEM_REF:
844 /* The base address gets its own vn_reference_op_s structure. */
845 temp.op0 = TREE_OPERAND (ref, 1);
846 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
847 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
848 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
849 break;
850 case BIT_FIELD_REF:
851 /* Record bits, position and storage order. */
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
854 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
855 break;
856 case COMPONENT_REF:
857 /* The field decl is enough to unambiguously specify the field,
858 a matching type is not necessary and a mismatching type
859 is always a spurious difference. */
860 temp.type = NULL_TREE;
861 temp.op0 = TREE_OPERAND (ref, 1);
862 temp.op1 = TREE_OPERAND (ref, 2);
864 tree this_offset = component_ref_field_offset (ref);
865 if (this_offset
866 && TREE_CODE (this_offset) == INTEGER_CST)
868 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
869 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
871 offset_int off
872 = (wi::to_offset (this_offset)
873 + wi::lrshift (wi::to_offset (bit_offset),
874 LOG2_BITS_PER_UNIT));
875 if (wi::fits_shwi_p (off)
876 /* Probibit value-numbering zero offset components
877 of addresses the same before the pass folding
878 __builtin_object_size had a chance to run
879 (checking cfun->after_inlining does the
880 trick here). */
881 && (TREE_CODE (orig) != ADDR_EXPR
882 || off != 0
883 || cfun->after_inlining))
884 temp.off = off.to_shwi ();
888 break;
889 case ARRAY_RANGE_REF:
890 case ARRAY_REF:
891 /* Record index as operand. */
892 temp.op0 = TREE_OPERAND (ref, 1);
893 /* Always record lower bounds and element size. */
894 temp.op1 = array_ref_low_bound (ref);
895 temp.op2 = array_ref_element_size (ref);
896 if (TREE_CODE (temp.op0) == INTEGER_CST
897 && TREE_CODE (temp.op1) == INTEGER_CST
898 && TREE_CODE (temp.op2) == INTEGER_CST)
900 offset_int off = ((wi::to_offset (temp.op0)
901 - wi::to_offset (temp.op1))
902 * wi::to_offset (temp.op2));
903 if (wi::fits_shwi_p (off))
904 temp.off = off.to_shwi();
906 break;
907 case VAR_DECL:
908 if (DECL_HARD_REGISTER (ref))
910 temp.op0 = ref;
911 break;
913 /* Fallthru. */
914 case PARM_DECL:
915 case CONST_DECL:
916 case RESULT_DECL:
917 /* Canonicalize decls to MEM[&decl] which is what we end up with
918 when valueizing MEM[ptr] with ptr = &decl. */
919 temp.opcode = MEM_REF;
920 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
921 temp.off = 0;
922 result->safe_push (temp);
923 temp.opcode = ADDR_EXPR;
924 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
925 temp.type = TREE_TYPE (temp.op0);
926 temp.off = -1;
927 break;
928 case STRING_CST:
929 case INTEGER_CST:
930 case COMPLEX_CST:
931 case VECTOR_CST:
932 case REAL_CST:
933 case FIXED_CST:
934 case CONSTRUCTOR:
935 case SSA_NAME:
936 temp.op0 = ref;
937 break;
938 case ADDR_EXPR:
939 if (is_gimple_min_invariant (ref))
941 temp.op0 = ref;
942 break;
944 break;
945 /* These are only interesting for their operands, their
946 existence, and their type. They will never be the last
947 ref in the chain of references (IE they require an
948 operand), so we don't have to put anything
949 for op* as it will be handled by the iteration */
950 case REALPART_EXPR:
951 temp.off = 0;
952 break;
953 case VIEW_CONVERT_EXPR:
954 temp.off = 0;
955 temp.reverse = storage_order_barrier_p (ref);
956 break;
957 case IMAGPART_EXPR:
958 /* This is only interesting for its constant offset. */
959 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
960 break;
961 default:
962 gcc_unreachable ();
964 result->safe_push (temp);
966 if (REFERENCE_CLASS_P (ref)
967 || TREE_CODE (ref) == MODIFY_EXPR
968 || TREE_CODE (ref) == WITH_SIZE_EXPR
969 || (TREE_CODE (ref) == ADDR_EXPR
970 && !is_gimple_min_invariant (ref)))
971 ref = TREE_OPERAND (ref, 0);
972 else
973 ref = NULL_TREE;
977 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
978 operands in *OPS, the reference alias set SET and the reference type TYPE.
979 Return true if something useful was produced. */
981 bool
982 ao_ref_init_from_vn_reference (ao_ref *ref,
983 alias_set_type set, tree type,
984 vec<vn_reference_op_s> ops)
986 vn_reference_op_t op;
987 unsigned i;
988 tree base = NULL_TREE;
989 tree *op0_p = &base;
990 HOST_WIDE_INT offset = 0;
991 HOST_WIDE_INT max_size;
992 HOST_WIDE_INT size = -1;
993 tree size_tree = NULL_TREE;
994 alias_set_type base_alias_set = -1;
996 /* First get the final access size from just the outermost expression. */
997 op = &ops[0];
998 if (op->opcode == COMPONENT_REF)
999 size_tree = DECL_SIZE (op->op0);
1000 else if (op->opcode == BIT_FIELD_REF)
1001 size_tree = op->op0;
1002 else
1004 machine_mode mode = TYPE_MODE (type);
1005 if (mode == BLKmode)
1006 size_tree = TYPE_SIZE (type);
1007 else
1008 size = GET_MODE_BITSIZE (mode);
1010 if (size_tree != NULL_TREE)
1012 if (!tree_fits_uhwi_p (size_tree))
1013 size = -1;
1014 else
1015 size = tree_to_uhwi (size_tree);
1018 /* Initially, maxsize is the same as the accessed element size.
1019 In the following it will only grow (or become -1). */
1020 max_size = size;
1022 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1023 and find the ultimate containing object. */
1024 FOR_EACH_VEC_ELT (ops, i, op)
1026 switch (op->opcode)
1028 /* These may be in the reference ops, but we cannot do anything
1029 sensible with them here. */
1030 case ADDR_EXPR:
1031 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1032 if (base != NULL_TREE
1033 && TREE_CODE (base) == MEM_REF
1034 && op->op0
1035 && DECL_P (TREE_OPERAND (op->op0, 0)))
1037 vn_reference_op_t pop = &ops[i-1];
1038 base = TREE_OPERAND (op->op0, 0);
1039 if (pop->off == -1)
1041 max_size = -1;
1042 offset = 0;
1044 else
1045 offset += pop->off * BITS_PER_UNIT;
1046 op0_p = NULL;
1047 break;
1049 /* Fallthru. */
1050 case CALL_EXPR:
1051 return false;
1053 /* Record the base objects. */
1054 case MEM_REF:
1055 base_alias_set = get_deref_alias_set (op->op0);
1056 *op0_p = build2 (MEM_REF, op->type,
1057 NULL_TREE, op->op0);
1058 op0_p = &TREE_OPERAND (*op0_p, 0);
1059 break;
1061 case VAR_DECL:
1062 case PARM_DECL:
1063 case RESULT_DECL:
1064 case SSA_NAME:
1065 *op0_p = op->op0;
1066 op0_p = NULL;
1067 break;
1069 /* And now the usual component-reference style ops. */
1070 case BIT_FIELD_REF:
1071 offset += tree_to_shwi (op->op1);
1072 break;
1074 case COMPONENT_REF:
1076 tree field = op->op0;
1077 /* We do not have a complete COMPONENT_REF tree here so we
1078 cannot use component_ref_field_offset. Do the interesting
1079 parts manually. */
1081 if (op->op1
1082 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1083 max_size = -1;
1084 else
1086 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1087 * BITS_PER_UNIT);
1088 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1090 break;
1093 case ARRAY_RANGE_REF:
1094 case ARRAY_REF:
1095 /* We recorded the lower bound and the element size. */
1096 if (!tree_fits_shwi_p (op->op0)
1097 || !tree_fits_shwi_p (op->op1)
1098 || !tree_fits_shwi_p (op->op2))
1099 max_size = -1;
1100 else
1102 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1103 hindex -= tree_to_shwi (op->op1);
1104 hindex *= tree_to_shwi (op->op2);
1105 hindex *= BITS_PER_UNIT;
1106 offset += hindex;
1108 break;
1110 case REALPART_EXPR:
1111 break;
1113 case IMAGPART_EXPR:
1114 offset += size;
1115 break;
1117 case VIEW_CONVERT_EXPR:
1118 break;
1120 case STRING_CST:
1121 case INTEGER_CST:
1122 case COMPLEX_CST:
1123 case VECTOR_CST:
1124 case REAL_CST:
1125 case CONSTRUCTOR:
1126 case CONST_DECL:
1127 return false;
1129 default:
1130 return false;
1134 if (base == NULL_TREE)
1135 return false;
1137 ref->ref = NULL_TREE;
1138 ref->base = base;
1139 ref->offset = offset;
1140 ref->size = size;
1141 ref->max_size = max_size;
1142 ref->ref_alias_set = set;
1143 if (base_alias_set != -1)
1144 ref->base_alias_set = base_alias_set;
1145 else
1146 ref->base_alias_set = get_alias_set (base);
1147 /* We discount volatiles from value-numbering elsewhere. */
1148 ref->volatile_p = false;
1150 return true;
1153 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1154 vn_reference_op_s's. */
1156 static void
1157 copy_reference_ops_from_call (gcall *call,
1158 vec<vn_reference_op_s> *result)
1160 vn_reference_op_s temp;
1161 unsigned i;
1162 tree lhs = gimple_call_lhs (call);
1163 int lr;
1165 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1166 different. By adding the lhs here in the vector, we ensure that the
1167 hashcode is different, guaranteeing a different value number. */
1168 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1170 memset (&temp, 0, sizeof (temp));
1171 temp.opcode = MODIFY_EXPR;
1172 temp.type = TREE_TYPE (lhs);
1173 temp.op0 = lhs;
1174 temp.off = -1;
1175 result->safe_push (temp);
1178 /* Copy the type, opcode, function, static chain and EH region, if any. */
1179 memset (&temp, 0, sizeof (temp));
1180 temp.type = gimple_call_return_type (call);
1181 temp.opcode = CALL_EXPR;
1182 temp.op0 = gimple_call_fn (call);
1183 temp.op1 = gimple_call_chain (call);
1184 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1185 temp.op2 = size_int (lr);
1186 temp.off = -1;
1187 if (gimple_call_with_bounds_p (call))
1188 temp.with_bounds = 1;
1189 result->safe_push (temp);
1191 /* Copy the call arguments. As they can be references as well,
1192 just chain them together. */
1193 for (i = 0; i < gimple_call_num_args (call); ++i)
1195 tree callarg = gimple_call_arg (call, i);
1196 copy_reference_ops_from_ref (callarg, result);
1200 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1201 *I_P to point to the last element of the replacement. */
1202 void
1203 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1204 unsigned int *i_p)
1206 unsigned int i = *i_p;
1207 vn_reference_op_t op = &(*ops)[i];
1208 vn_reference_op_t mem_op = &(*ops)[i - 1];
1209 tree addr_base;
1210 HOST_WIDE_INT addr_offset = 0;
1212 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1213 from .foo.bar to the preceding MEM_REF offset and replace the
1214 address with &OBJ. */
1215 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1216 &addr_offset);
1217 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1218 if (addr_base != TREE_OPERAND (op->op0, 0))
1220 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1221 off += addr_offset;
1222 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1223 op->op0 = build_fold_addr_expr (addr_base);
1224 if (tree_fits_shwi_p (mem_op->op0))
1225 mem_op->off = tree_to_shwi (mem_op->op0);
1226 else
1227 mem_op->off = -1;
1231 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1232 *I_P to point to the last element of the replacement. */
1233 static void
1234 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1235 unsigned int *i_p)
1237 unsigned int i = *i_p;
1238 vn_reference_op_t op = &(*ops)[i];
1239 vn_reference_op_t mem_op = &(*ops)[i - 1];
1240 gimple def_stmt;
1241 enum tree_code code;
1242 offset_int off;
1244 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1245 if (!is_gimple_assign (def_stmt))
1246 return;
1248 code = gimple_assign_rhs_code (def_stmt);
1249 if (code != ADDR_EXPR
1250 && code != POINTER_PLUS_EXPR)
1251 return;
1253 off = offset_int::from (mem_op->op0, SIGNED);
1255 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1256 from .foo.bar to the preceding MEM_REF offset and replace the
1257 address with &OBJ. */
1258 if (code == ADDR_EXPR)
1260 tree addr, addr_base;
1261 HOST_WIDE_INT addr_offset;
1263 addr = gimple_assign_rhs1 (def_stmt);
1264 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1265 &addr_offset);
1266 /* If that didn't work because the address isn't invariant propagate
1267 the reference tree from the address operation in case the current
1268 dereference isn't offsetted. */
1269 if (!addr_base
1270 && *i_p == ops->length () - 1
1271 && off == 0
1272 /* This makes us disable this transform for PRE where the
1273 reference ops might be also used for code insertion which
1274 is invalid. */
1275 && default_vn_walk_kind == VN_WALKREWRITE)
1277 auto_vec<vn_reference_op_s, 32> tem;
1278 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1279 ops->pop ();
1280 ops->pop ();
1281 ops->safe_splice (tem);
1282 --*i_p;
1283 return;
1285 if (!addr_base
1286 || TREE_CODE (addr_base) != MEM_REF)
1287 return;
1289 off += addr_offset;
1290 off += mem_ref_offset (addr_base);
1291 op->op0 = TREE_OPERAND (addr_base, 0);
1293 else
1295 tree ptr, ptroff;
1296 ptr = gimple_assign_rhs1 (def_stmt);
1297 ptroff = gimple_assign_rhs2 (def_stmt);
1298 if (TREE_CODE (ptr) != SSA_NAME
1299 || TREE_CODE (ptroff) != INTEGER_CST)
1300 return;
1302 off += wi::to_offset (ptroff);
1303 op->op0 = ptr;
1306 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1307 if (tree_fits_shwi_p (mem_op->op0))
1308 mem_op->off = tree_to_shwi (mem_op->op0);
1309 else
1310 mem_op->off = -1;
1311 if (TREE_CODE (op->op0) == SSA_NAME)
1312 op->op0 = SSA_VAL (op->op0);
1313 if (TREE_CODE (op->op0) != SSA_NAME)
1314 op->opcode = TREE_CODE (op->op0);
1316 /* And recurse. */
1317 if (TREE_CODE (op->op0) == SSA_NAME)
1318 vn_reference_maybe_forwprop_address (ops, i_p);
1319 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1320 vn_reference_fold_indirect (ops, i_p);
1323 /* Optimize the reference REF to a constant if possible or return
1324 NULL_TREE if not. */
1326 tree
1327 fully_constant_vn_reference_p (vn_reference_t ref)
1329 vec<vn_reference_op_s> operands = ref->operands;
1330 vn_reference_op_t op;
1332 /* Try to simplify the translated expression if it is
1333 a call to a builtin function with at most two arguments. */
1334 op = &operands[0];
1335 if (op->opcode == CALL_EXPR
1336 && TREE_CODE (op->op0) == ADDR_EXPR
1337 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1338 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1339 && operands.length () >= 2
1340 && operands.length () <= 3)
1342 vn_reference_op_t arg0, arg1 = NULL;
1343 bool anyconst = false;
1344 arg0 = &operands[1];
1345 if (operands.length () > 2)
1346 arg1 = &operands[2];
1347 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1348 || (arg0->opcode == ADDR_EXPR
1349 && is_gimple_min_invariant (arg0->op0)))
1350 anyconst = true;
1351 if (arg1
1352 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1353 || (arg1->opcode == ADDR_EXPR
1354 && is_gimple_min_invariant (arg1->op0))))
1355 anyconst = true;
1356 if (anyconst)
1358 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1359 arg1 ? 2 : 1,
1360 arg0->op0,
1361 arg1 ? arg1->op0 : NULL);
1362 if (folded
1363 && TREE_CODE (folded) == NOP_EXPR)
1364 folded = TREE_OPERAND (folded, 0);
1365 if (folded
1366 && is_gimple_min_invariant (folded))
1367 return folded;
1371 /* Simplify reads from constants or constant initializers. */
1372 else if (BITS_PER_UNIT == 8
1373 && is_gimple_reg_type (ref->type)
1374 && (!INTEGRAL_TYPE_P (ref->type)
1375 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1377 HOST_WIDE_INT off = 0;
1378 HOST_WIDE_INT size;
1379 if (INTEGRAL_TYPE_P (ref->type))
1380 size = TYPE_PRECISION (ref->type);
1381 else
1382 size = tree_to_shwi (TYPE_SIZE (ref->type));
1383 if (size % BITS_PER_UNIT != 0
1384 || size > MAX_BITSIZE_MODE_ANY_MODE)
1385 return NULL_TREE;
1386 size /= BITS_PER_UNIT;
1387 unsigned i;
1388 for (i = 0; i < operands.length (); ++i)
1390 if (operands[i].off == -1)
1391 return NULL_TREE;
1392 off += operands[i].off;
1393 if (operands[i].opcode == MEM_REF)
1395 ++i;
1396 break;
1399 vn_reference_op_t base = &operands[--i];
1400 tree ctor = error_mark_node;
1401 tree decl = NULL_TREE;
1402 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1403 ctor = base->op0;
1404 else if (base->opcode == MEM_REF
1405 && base[1].opcode == ADDR_EXPR
1406 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1407 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1409 decl = TREE_OPERAND (base[1].op0, 0);
1410 ctor = ctor_for_folding (decl);
1412 if (ctor == NULL_TREE)
1413 return build_zero_cst (ref->type);
1414 else if (ctor != error_mark_node)
1416 if (decl)
1418 tree res = fold_ctor_reference (ref->type, ctor,
1419 off * BITS_PER_UNIT,
1420 size * BITS_PER_UNIT, decl);
1421 if (res)
1423 STRIP_USELESS_TYPE_CONVERSION (res);
1424 if (is_gimple_min_invariant (res))
1425 return res;
1428 else
1430 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1431 if (native_encode_expr (ctor, buf, size, off) > 0)
1432 return native_interpret_expr (ref->type, buf, size);
1437 return NULL_TREE;
1440 /* Return true if OPS contain a storage order barrier. */
1442 static bool
1443 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1445 vn_reference_op_t op;
1446 unsigned i;
1448 FOR_EACH_VEC_ELT (ops, i, op)
1449 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1450 return true;
1452 return false;
1455 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1456 structures into their value numbers. This is done in-place, and
1457 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1458 whether any operands were valueized. */
1460 static vec<vn_reference_op_s>
1461 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1463 vn_reference_op_t vro;
1464 unsigned int i;
1466 *valueized_anything = false;
1468 FOR_EACH_VEC_ELT (orig, i, vro)
1470 if (vro->opcode == SSA_NAME
1471 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1473 tree tem = SSA_VAL (vro->op0);
1474 if (tem != vro->op0)
1476 *valueized_anything = true;
1477 vro->op0 = tem;
1479 /* If it transforms from an SSA_NAME to a constant, update
1480 the opcode. */
1481 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1482 vro->opcode = TREE_CODE (vro->op0);
1484 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1486 tree tem = SSA_VAL (vro->op1);
1487 if (tem != vro->op1)
1489 *valueized_anything = true;
1490 vro->op1 = tem;
1493 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1495 tree tem = SSA_VAL (vro->op2);
1496 if (tem != vro->op2)
1498 *valueized_anything = true;
1499 vro->op2 = tem;
1502 /* If it transforms from an SSA_NAME to an address, fold with
1503 a preceding indirect reference. */
1504 if (i > 0
1505 && vro->op0
1506 && TREE_CODE (vro->op0) == ADDR_EXPR
1507 && orig[i - 1].opcode == MEM_REF)
1508 vn_reference_fold_indirect (&orig, &i);
1509 else if (i > 0
1510 && vro->opcode == SSA_NAME
1511 && orig[i - 1].opcode == MEM_REF)
1512 vn_reference_maybe_forwprop_address (&orig, &i);
1513 /* If it transforms a non-constant ARRAY_REF into a constant
1514 one, adjust the constant offset. */
1515 else if (vro->opcode == ARRAY_REF
1516 && vro->off == -1
1517 && TREE_CODE (vro->op0) == INTEGER_CST
1518 && TREE_CODE (vro->op1) == INTEGER_CST
1519 && TREE_CODE (vro->op2) == INTEGER_CST)
1521 offset_int off = ((wi::to_offset (vro->op0)
1522 - wi::to_offset (vro->op1))
1523 * wi::to_offset (vro->op2));
1524 if (wi::fits_shwi_p (off))
1525 vro->off = off.to_shwi ();
1529 return orig;
1532 static vec<vn_reference_op_s>
1533 valueize_refs (vec<vn_reference_op_s> orig)
1535 bool tem;
1536 return valueize_refs_1 (orig, &tem);
1539 static vec<vn_reference_op_s> shared_lookup_references;
1541 /* Create a vector of vn_reference_op_s structures from REF, a
1542 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1543 this function. *VALUEIZED_ANYTHING will specify whether any
1544 operands were valueized. */
1546 static vec<vn_reference_op_s>
1547 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1549 if (!ref)
1550 return vNULL;
1551 shared_lookup_references.truncate (0);
1552 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1553 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1554 valueized_anything);
1555 return shared_lookup_references;
1558 /* Create a vector of vn_reference_op_s structures from CALL, a
1559 call statement. The vector is shared among all callers of
1560 this function. */
1562 static vec<vn_reference_op_s>
1563 valueize_shared_reference_ops_from_call (gcall *call)
1565 if (!call)
1566 return vNULL;
1567 shared_lookup_references.truncate (0);
1568 copy_reference_ops_from_call (call, &shared_lookup_references);
1569 shared_lookup_references = valueize_refs (shared_lookup_references);
1570 return shared_lookup_references;
1573 /* Lookup a SCCVN reference operation VR in the current hash table.
1574 Returns the resulting value number if it exists in the hash table,
1575 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1576 vn_reference_t stored in the hashtable if something is found. */
1578 static tree
1579 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1581 vn_reference_s **slot;
1582 hashval_t hash;
1584 hash = vr->hashcode;
1585 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1586 if (!slot && current_info == optimistic_info)
1587 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1588 if (slot)
1590 if (vnresult)
1591 *vnresult = (vn_reference_t)*slot;
1592 return ((vn_reference_t)*slot)->result;
1595 return NULL_TREE;
1598 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1599 with the current VUSE and performs the expression lookup. */
1601 static void *
1602 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1603 unsigned int cnt, void *vr_)
1605 vn_reference_t vr = (vn_reference_t)vr_;
1606 vn_reference_s **slot;
1607 hashval_t hash;
1609 /* This bounds the stmt walks we perform on reference lookups
1610 to O(1) instead of O(N) where N is the number of dominating
1611 stores. */
1612 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1613 return (void *)-1;
1615 if (last_vuse_ptr)
1616 *last_vuse_ptr = vuse;
1618 /* Fixup vuse and hash. */
1619 if (vr->vuse)
1620 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1621 vr->vuse = vuse_ssa_val (vuse);
1622 if (vr->vuse)
1623 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1625 hash = vr->hashcode;
1626 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1627 if (!slot && current_info == optimistic_info)
1628 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1629 if (slot)
1630 return *slot;
1632 return NULL;
1635 /* Lookup an existing or insert a new vn_reference entry into the
1636 value table for the VUSE, SET, TYPE, OPERANDS reference which
1637 has the value VALUE which is either a constant or an SSA name. */
1639 static vn_reference_t
1640 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1641 alias_set_type set,
1642 tree type,
1643 vec<vn_reference_op_s,
1644 va_heap> operands,
1645 tree value)
1647 vn_reference_s vr1;
1648 vn_reference_t result;
1649 unsigned value_id;
1650 vr1.vuse = vuse;
1651 vr1.operands = operands;
1652 vr1.type = type;
1653 vr1.set = set;
1654 vr1.hashcode = vn_reference_compute_hash (&vr1);
1655 if (vn_reference_lookup_1 (&vr1, &result))
1656 return result;
1657 if (TREE_CODE (value) == SSA_NAME)
1658 value_id = VN_INFO (value)->value_id;
1659 else
1660 value_id = get_or_alloc_constant_value_id (value);
1661 return vn_reference_insert_pieces (vuse, set, type,
1662 operands.copy (), value, value_id);
1665 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1666 from the statement defining VUSE and if not successful tries to
1667 translate *REFP and VR_ through an aggregate copy at the definition
1668 of VUSE. */
1670 static void *
1671 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1672 bool disambiguate_only)
1674 vn_reference_t vr = (vn_reference_t)vr_;
1675 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1676 tree base;
1677 HOST_WIDE_INT offset, maxsize;
1678 static vec<vn_reference_op_s>
1679 lhs_ops = vNULL;
1680 ao_ref lhs_ref;
1681 bool lhs_ref_ok = false;
1683 /* First try to disambiguate after value-replacing in the definitions LHS. */
1684 if (is_gimple_assign (def_stmt))
1686 tree lhs = gimple_assign_lhs (def_stmt);
1687 bool valueized_anything = false;
1688 /* Avoid re-allocation overhead. */
1689 lhs_ops.truncate (0);
1690 copy_reference_ops_from_ref (lhs, &lhs_ops);
1691 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1692 if (valueized_anything)
1694 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1695 get_alias_set (lhs),
1696 TREE_TYPE (lhs), lhs_ops);
1697 if (lhs_ref_ok
1698 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1699 return NULL;
1701 else
1703 ao_ref_init (&lhs_ref, lhs);
1704 lhs_ref_ok = true;
1707 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1708 && gimple_call_num_args (def_stmt) <= 4)
1710 /* For builtin calls valueize its arguments and call the
1711 alias oracle again. Valueization may improve points-to
1712 info of pointers and constify size and position arguments.
1713 Originally this was motivated by PR61034 which has
1714 conditional calls to free falsely clobbering ref because
1715 of imprecise points-to info of the argument. */
1716 tree oldargs[4];
1717 bool valueized_anything = false;
1718 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1720 oldargs[i] = gimple_call_arg (def_stmt, i);
1721 if (TREE_CODE (oldargs[i]) == SSA_NAME
1722 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1724 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1725 valueized_anything = true;
1728 if (valueized_anything)
1730 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1731 ref);
1732 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1733 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1734 if (!res)
1735 return NULL;
1739 if (disambiguate_only)
1740 return (void *)-1;
1742 base = ao_ref_base (ref);
1743 offset = ref->offset;
1744 maxsize = ref->max_size;
1746 /* If we cannot constrain the size of the reference we cannot
1747 test if anything kills it. */
1748 if (maxsize == -1)
1749 return (void *)-1;
1751 /* We can't deduce anything useful from clobbers. */
1752 if (gimple_clobber_p (def_stmt))
1753 return (void *)-1;
1755 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1756 from that definition.
1757 1) Memset. */
1758 if (is_gimple_reg_type (vr->type)
1759 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1760 && integer_zerop (gimple_call_arg (def_stmt, 1))
1761 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1762 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1764 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1765 tree base2;
1766 HOST_WIDE_INT offset2, size2, maxsize2;
1767 bool reverse;
1768 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1769 &reverse);
1770 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1771 if ((unsigned HOST_WIDE_INT)size2 / 8
1772 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1773 && maxsize2 != -1
1774 && operand_equal_p (base, base2, 0)
1775 && offset2 <= offset
1776 && offset2 + size2 >= offset + maxsize)
1778 tree val = build_zero_cst (vr->type);
1779 return vn_reference_lookup_or_insert_for_pieces
1780 (vuse, vr->set, vr->type, vr->operands, val);
1784 /* 2) Assignment from an empty CONSTRUCTOR. */
1785 else if (is_gimple_reg_type (vr->type)
1786 && gimple_assign_single_p (def_stmt)
1787 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1788 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1790 tree base2;
1791 HOST_WIDE_INT offset2, size2, maxsize2;
1792 bool reverse;
1793 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1794 &offset2, &size2, &maxsize2, &reverse);
1795 if (maxsize2 != -1
1796 && operand_equal_p (base, base2, 0)
1797 && offset2 <= offset
1798 && offset2 + size2 >= offset + maxsize)
1800 tree val = build_zero_cst (vr->type);
1801 return vn_reference_lookup_or_insert_for_pieces
1802 (vuse, vr->set, vr->type, vr->operands, val);
1806 /* 3) Assignment from a constant. We can use folds native encode/interpret
1807 routines to extract the assigned bits. */
1808 else if (vn_walk_kind == VN_WALKREWRITE
1809 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1810 && ref->size == maxsize
1811 && maxsize % BITS_PER_UNIT == 0
1812 && offset % BITS_PER_UNIT == 0
1813 && is_gimple_reg_type (vr->type)
1814 && !contains_storage_order_barrier_p (vr->operands)
1815 && gimple_assign_single_p (def_stmt)
1816 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1818 tree base2;
1819 HOST_WIDE_INT offset2, size2, maxsize2;
1820 bool reverse;
1821 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1822 &offset2, &size2, &maxsize2, &reverse);
1823 if (!reverse
1824 && maxsize2 != -1
1825 && maxsize2 == size2
1826 && size2 % BITS_PER_UNIT == 0
1827 && offset2 % BITS_PER_UNIT == 0
1828 && operand_equal_p (base, base2, 0)
1829 && offset2 <= offset
1830 && offset2 + size2 >= offset + maxsize)
1832 /* We support up to 512-bit values (for V8DFmode). */
1833 unsigned char buffer[64];
1834 int len;
1836 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1837 buffer, sizeof (buffer));
1838 if (len > 0)
1840 tree val = native_interpret_expr (vr->type,
1841 buffer
1842 + ((offset - offset2)
1843 / BITS_PER_UNIT),
1844 ref->size / BITS_PER_UNIT);
1845 if (val)
1846 return vn_reference_lookup_or_insert_for_pieces
1847 (vuse, vr->set, vr->type, vr->operands, val);
1852 /* 4) Assignment from an SSA name which definition we may be able
1853 to access pieces from. */
1854 else if (ref->size == maxsize
1855 && is_gimple_reg_type (vr->type)
1856 && !contains_storage_order_barrier_p (vr->operands)
1857 && gimple_assign_single_p (def_stmt)
1858 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1860 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1861 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1862 if (is_gimple_assign (def_stmt2)
1863 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1864 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1865 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1867 tree base2;
1868 HOST_WIDE_INT offset2, size2, maxsize2, off;
1869 bool reverse;
1870 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1871 &offset2, &size2, &maxsize2,
1872 &reverse);
1873 off = offset - offset2;
1874 if (!reverse
1875 && maxsize2 != -1
1876 && maxsize2 == size2
1877 && operand_equal_p (base, base2, 0)
1878 && offset2 <= offset
1879 && offset2 + size2 >= offset + maxsize)
1881 tree val = NULL_TREE;
1882 HOST_WIDE_INT elsz
1883 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1884 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1886 if (off == 0)
1887 val = gimple_assign_rhs1 (def_stmt2);
1888 else if (off == elsz)
1889 val = gimple_assign_rhs2 (def_stmt2);
1891 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1892 && off % elsz == 0)
1894 tree ctor = gimple_assign_rhs1 (def_stmt2);
1895 unsigned i = off / elsz;
1896 if (i < CONSTRUCTOR_NELTS (ctor))
1898 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1899 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1901 if (TREE_CODE (TREE_TYPE (elt->value))
1902 != VECTOR_TYPE)
1903 val = elt->value;
1907 if (val)
1908 return vn_reference_lookup_or_insert_for_pieces
1909 (vuse, vr->set, vr->type, vr->operands, val);
1914 /* 5) For aggregate copies translate the reference through them if
1915 the copy kills ref. */
1916 else if (vn_walk_kind == VN_WALKREWRITE
1917 && gimple_assign_single_p (def_stmt)
1918 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1919 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1920 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1922 tree base2;
1923 HOST_WIDE_INT offset2, size2, maxsize2;
1924 int i, j, k;
1925 auto_vec<vn_reference_op_s> rhs;
1926 vn_reference_op_t vro;
1927 ao_ref r;
1929 if (!lhs_ref_ok)
1930 return (void *)-1;
1932 /* See if the assignment kills REF. */
1933 base2 = ao_ref_base (&lhs_ref);
1934 offset2 = lhs_ref.offset;
1935 size2 = lhs_ref.size;
1936 maxsize2 = lhs_ref.max_size;
1937 if (maxsize2 == -1
1938 || (base != base2
1939 && (TREE_CODE (base) != MEM_REF
1940 || TREE_CODE (base2) != MEM_REF
1941 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1942 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1943 TREE_OPERAND (base2, 1))))
1944 || offset2 > offset
1945 || offset2 + size2 < offset + maxsize)
1946 return (void *)-1;
1948 /* Find the common base of ref and the lhs. lhs_ops already
1949 contains valueized operands for the lhs. */
1950 i = vr->operands.length () - 1;
1951 j = lhs_ops.length () - 1;
1952 while (j >= 0 && i >= 0
1953 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1955 i--;
1956 j--;
1959 /* ??? The innermost op should always be a MEM_REF and we already
1960 checked that the assignment to the lhs kills vr. Thus for
1961 aggregate copies using char[] types the vn_reference_op_eq
1962 may fail when comparing types for compatibility. But we really
1963 don't care here - further lookups with the rewritten operands
1964 will simply fail if we messed up types too badly. */
1965 HOST_WIDE_INT extra_off = 0;
1966 if (j == 0 && i >= 0
1967 && lhs_ops[0].opcode == MEM_REF
1968 && lhs_ops[0].off != -1)
1970 if (lhs_ops[0].off == vr->operands[i].off)
1971 i--, j--;
1972 else if (vr->operands[i].opcode == MEM_REF
1973 && vr->operands[i].off != -1)
1975 extra_off = vr->operands[i].off - lhs_ops[0].off;
1976 i--, j--;
1980 /* i now points to the first additional op.
1981 ??? LHS may not be completely contained in VR, one or more
1982 VIEW_CONVERT_EXPRs could be in its way. We could at least
1983 try handling outermost VIEW_CONVERT_EXPRs. */
1984 if (j != -1)
1985 return (void *)-1;
1987 /* Punt if the additional ops contain a storage order barrier. */
1988 for (k = i; k >= 0; k--)
1990 vro = &vr->operands[k];
1991 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1992 return (void *)-1;
1995 /* Now re-write REF to be based on the rhs of the assignment. */
1996 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1998 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1999 if (extra_off != 0)
2001 if (rhs.length () < 2
2002 || rhs[0].opcode != MEM_REF
2003 || rhs[0].off == -1)
2004 return (void *)-1;
2005 rhs[0].off += extra_off;
2006 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2007 build_int_cst (TREE_TYPE (rhs[0].op0),
2008 extra_off));
2011 /* We need to pre-pend vr->operands[0..i] to rhs. */
2012 vec<vn_reference_op_s> old = vr->operands;
2013 if (i + 1 + rhs.length () > vr->operands.length ())
2015 vr->operands.safe_grow (i + 1 + rhs.length ());
2016 if (old == shared_lookup_references)
2017 shared_lookup_references = vr->operands;
2019 else
2020 vr->operands.truncate (i + 1 + rhs.length ());
2021 FOR_EACH_VEC_ELT (rhs, j, vro)
2022 vr->operands[i + 1 + j] = *vro;
2023 vr->operands = valueize_refs (vr->operands);
2024 if (old == shared_lookup_references)
2025 shared_lookup_references = vr->operands;
2026 vr->hashcode = vn_reference_compute_hash (vr);
2028 /* Try folding the new reference to a constant. */
2029 tree val = fully_constant_vn_reference_p (vr);
2030 if (val)
2031 return vn_reference_lookup_or_insert_for_pieces
2032 (vuse, vr->set, vr->type, vr->operands, val);
2034 /* Adjust *ref from the new operands. */
2035 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2036 return (void *)-1;
2037 /* This can happen with bitfields. */
2038 if (ref->size != r.size)
2039 return (void *)-1;
2040 *ref = r;
2042 /* Do not update last seen VUSE after translating. */
2043 last_vuse_ptr = NULL;
2045 /* Keep looking for the adjusted *REF / VR pair. */
2046 return NULL;
2049 /* 6) For memcpy copies translate the reference through them if
2050 the copy kills ref. */
2051 else if (vn_walk_kind == VN_WALKREWRITE
2052 && is_gimple_reg_type (vr->type)
2053 /* ??? Handle BCOPY as well. */
2054 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2055 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2056 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2057 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2058 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2059 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2060 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2061 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2063 tree lhs, rhs;
2064 ao_ref r;
2065 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2066 vn_reference_op_s op;
2067 HOST_WIDE_INT at;
2069 /* Only handle non-variable, addressable refs. */
2070 if (ref->size != maxsize
2071 || offset % BITS_PER_UNIT != 0
2072 || ref->size % BITS_PER_UNIT != 0)
2073 return (void *)-1;
2075 /* Extract a pointer base and an offset for the destination. */
2076 lhs = gimple_call_arg (def_stmt, 0);
2077 lhs_offset = 0;
2078 if (TREE_CODE (lhs) == SSA_NAME)
2080 lhs = SSA_VAL (lhs);
2081 if (TREE_CODE (lhs) == SSA_NAME)
2083 gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2084 if (gimple_assign_single_p (def_stmt)
2085 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2086 lhs = gimple_assign_rhs1 (def_stmt);
2089 if (TREE_CODE (lhs) == ADDR_EXPR)
2091 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2092 &lhs_offset);
2093 if (!tem)
2094 return (void *)-1;
2095 if (TREE_CODE (tem) == MEM_REF
2096 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2098 lhs = TREE_OPERAND (tem, 0);
2099 if (TREE_CODE (lhs) == SSA_NAME)
2100 lhs = SSA_VAL (lhs);
2101 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2103 else if (DECL_P (tem))
2104 lhs = build_fold_addr_expr (tem);
2105 else
2106 return (void *)-1;
2108 if (TREE_CODE (lhs) != SSA_NAME
2109 && TREE_CODE (lhs) != ADDR_EXPR)
2110 return (void *)-1;
2112 /* Extract a pointer base and an offset for the source. */
2113 rhs = gimple_call_arg (def_stmt, 1);
2114 rhs_offset = 0;
2115 if (TREE_CODE (rhs) == SSA_NAME)
2116 rhs = SSA_VAL (rhs);
2117 if (TREE_CODE (rhs) == ADDR_EXPR)
2119 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2120 &rhs_offset);
2121 if (!tem)
2122 return (void *)-1;
2123 if (TREE_CODE (tem) == MEM_REF
2124 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2126 rhs = TREE_OPERAND (tem, 0);
2127 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2129 else if (DECL_P (tem))
2130 rhs = build_fold_addr_expr (tem);
2131 else
2132 return (void *)-1;
2134 if (TREE_CODE (rhs) != SSA_NAME
2135 && TREE_CODE (rhs) != ADDR_EXPR)
2136 return (void *)-1;
2138 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2140 /* The bases of the destination and the references have to agree. */
2141 if ((TREE_CODE (base) != MEM_REF
2142 && !DECL_P (base))
2143 || (TREE_CODE (base) == MEM_REF
2144 && (TREE_OPERAND (base, 0) != lhs
2145 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2146 || (DECL_P (base)
2147 && (TREE_CODE (lhs) != ADDR_EXPR
2148 || TREE_OPERAND (lhs, 0) != base)))
2149 return (void *)-1;
2151 at = offset / BITS_PER_UNIT;
2152 if (TREE_CODE (base) == MEM_REF)
2153 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2154 /* If the access is completely outside of the memcpy destination
2155 area there is no aliasing. */
2156 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2157 || lhs_offset + copy_size <= at)
2158 return NULL;
2159 /* And the access has to be contained within the memcpy destination. */
2160 if (lhs_offset > at
2161 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2162 return (void *)-1;
2164 /* Make room for 2 operands in the new reference. */
2165 if (vr->operands.length () < 2)
2167 vec<vn_reference_op_s> old = vr->operands;
2168 vr->operands.safe_grow_cleared (2);
2169 if (old == shared_lookup_references
2170 && vr->operands != old)
2171 shared_lookup_references = vr->operands;
2173 else
2174 vr->operands.truncate (2);
2176 /* The looked-through reference is a simple MEM_REF. */
2177 memset (&op, 0, sizeof (op));
2178 op.type = vr->type;
2179 op.opcode = MEM_REF;
2180 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2181 op.off = at - lhs_offset + rhs_offset;
2182 vr->operands[0] = op;
2183 op.type = TREE_TYPE (rhs);
2184 op.opcode = TREE_CODE (rhs);
2185 op.op0 = rhs;
2186 op.off = -1;
2187 vr->operands[1] = op;
2188 vr->hashcode = vn_reference_compute_hash (vr);
2190 /* Adjust *ref from the new operands. */
2191 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2192 return (void *)-1;
2193 /* This can happen with bitfields. */
2194 if (ref->size != r.size)
2195 return (void *)-1;
2196 *ref = r;
2198 /* Do not update last seen VUSE after translating. */
2199 last_vuse_ptr = NULL;
2201 /* Keep looking for the adjusted *REF / VR pair. */
2202 return NULL;
2205 /* Bail out and stop walking. */
2206 return (void *)-1;
2209 /* Lookup a reference operation by it's parts, in the current hash table.
2210 Returns the resulting value number if it exists in the hash table,
2211 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2212 vn_reference_t stored in the hashtable if something is found. */
2214 tree
2215 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2216 vec<vn_reference_op_s> operands,
2217 vn_reference_t *vnresult, vn_lookup_kind kind)
2219 struct vn_reference_s vr1;
2220 vn_reference_t tmp;
2221 tree cst;
2223 if (!vnresult)
2224 vnresult = &tmp;
2225 *vnresult = NULL;
2227 vr1.vuse = vuse_ssa_val (vuse);
2228 shared_lookup_references.truncate (0);
2229 shared_lookup_references.safe_grow (operands.length ());
2230 memcpy (shared_lookup_references.address (),
2231 operands.address (),
2232 sizeof (vn_reference_op_s)
2233 * operands.length ());
2234 vr1.operands = operands = shared_lookup_references
2235 = valueize_refs (shared_lookup_references);
2236 vr1.type = type;
2237 vr1.set = set;
2238 vr1.hashcode = vn_reference_compute_hash (&vr1);
2239 if ((cst = fully_constant_vn_reference_p (&vr1)))
2240 return cst;
2242 vn_reference_lookup_1 (&vr1, vnresult);
2243 if (!*vnresult
2244 && kind != VN_NOWALK
2245 && vr1.vuse)
2247 ao_ref r;
2248 vn_walk_kind = kind;
2249 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2250 *vnresult =
2251 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2252 vn_reference_lookup_2,
2253 vn_reference_lookup_3,
2254 vuse_ssa_val, &vr1);
2255 gcc_checking_assert (vr1.operands == shared_lookup_references);
2258 if (*vnresult)
2259 return (*vnresult)->result;
2261 return NULL_TREE;
2264 /* Lookup OP in the current hash table, and return the resulting value
2265 number if it exists in the hash table. Return NULL_TREE if it does
2266 not exist in the hash table or if the result field of the structure
2267 was NULL.. VNRESULT will be filled in with the vn_reference_t
2268 stored in the hashtable if one exists. */
2270 tree
2271 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2272 vn_reference_t *vnresult)
2274 vec<vn_reference_op_s> operands;
2275 struct vn_reference_s vr1;
2276 tree cst;
2277 bool valuezied_anything;
2279 if (vnresult)
2280 *vnresult = NULL;
2282 vr1.vuse = vuse_ssa_val (vuse);
2283 vr1.operands = operands
2284 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2285 vr1.type = TREE_TYPE (op);
2286 vr1.set = get_alias_set (op);
2287 vr1.hashcode = vn_reference_compute_hash (&vr1);
2288 if ((cst = fully_constant_vn_reference_p (&vr1)))
2289 return cst;
2291 if (kind != VN_NOWALK
2292 && vr1.vuse)
2294 vn_reference_t wvnresult;
2295 ao_ref r;
2296 /* Make sure to use a valueized reference if we valueized anything.
2297 Otherwise preserve the full reference for advanced TBAA. */
2298 if (!valuezied_anything
2299 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2300 vr1.operands))
2301 ao_ref_init (&r, op);
2302 vn_walk_kind = kind;
2303 wvnresult =
2304 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2305 vn_reference_lookup_2,
2306 vn_reference_lookup_3,
2307 vuse_ssa_val, &vr1);
2308 gcc_checking_assert (vr1.operands == shared_lookup_references);
2309 if (wvnresult)
2311 if (vnresult)
2312 *vnresult = wvnresult;
2313 return wvnresult->result;
2316 return NULL_TREE;
2319 return vn_reference_lookup_1 (&vr1, vnresult);
2322 /* Lookup CALL in the current hash table and return the entry in
2323 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2325 void
2326 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2327 vn_reference_t vr)
2329 if (vnresult)
2330 *vnresult = NULL;
2332 tree vuse = gimple_vuse (call);
2334 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2335 vr->operands = valueize_shared_reference_ops_from_call (call);
2336 vr->type = gimple_expr_type (call);
2337 vr->set = 0;
2338 vr->hashcode = vn_reference_compute_hash (vr);
2339 vn_reference_lookup_1 (vr, vnresult);
2342 /* Insert OP into the current hash table with a value number of
2343 RESULT, and return the resulting reference structure we created. */
2345 static vn_reference_t
2346 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2348 vn_reference_s **slot;
2349 vn_reference_t vr1;
2350 bool tem;
2352 vr1 = current_info->references_pool->allocate ();
2353 if (TREE_CODE (result) == SSA_NAME)
2354 vr1->value_id = VN_INFO (result)->value_id;
2355 else
2356 vr1->value_id = get_or_alloc_constant_value_id (result);
2357 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2358 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2359 vr1->type = TREE_TYPE (op);
2360 vr1->set = get_alias_set (op);
2361 vr1->hashcode = vn_reference_compute_hash (vr1);
2362 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2363 vr1->result_vdef = vdef;
2365 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2366 INSERT);
2368 /* Because we lookup stores using vuses, and value number failures
2369 using the vdefs (see visit_reference_op_store for how and why),
2370 it's possible that on failure we may try to insert an already
2371 inserted store. This is not wrong, there is no ssa name for a
2372 store that we could use as a differentiator anyway. Thus, unlike
2373 the other lookup functions, you cannot gcc_assert (!*slot)
2374 here. */
2376 /* But free the old slot in case of a collision. */
2377 if (*slot)
2378 free_reference (*slot);
2380 *slot = vr1;
2381 return vr1;
2384 /* Insert a reference by it's pieces into the current hash table with
2385 a value number of RESULT. Return the resulting reference
2386 structure we created. */
2388 vn_reference_t
2389 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2390 vec<vn_reference_op_s> operands,
2391 tree result, unsigned int value_id)
2394 vn_reference_s **slot;
2395 vn_reference_t vr1;
2397 vr1 = current_info->references_pool->allocate ();
2398 vr1->value_id = value_id;
2399 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2400 vr1->operands = valueize_refs (operands);
2401 vr1->type = type;
2402 vr1->set = set;
2403 vr1->hashcode = vn_reference_compute_hash (vr1);
2404 if (result && TREE_CODE (result) == SSA_NAME)
2405 result = SSA_VAL (result);
2406 vr1->result = result;
2408 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2409 INSERT);
2411 /* At this point we should have all the things inserted that we have
2412 seen before, and we should never try inserting something that
2413 already exists. */
2414 gcc_assert (!*slot);
2415 if (*slot)
2416 free_reference (*slot);
2418 *slot = vr1;
2419 return vr1;
2422 /* Compute and return the hash value for nary operation VBO1. */
2424 static hashval_t
2425 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2427 inchash::hash hstate;
2428 unsigned i;
2430 for (i = 0; i < vno1->length; ++i)
2431 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2432 vno1->op[i] = SSA_VAL (vno1->op[i]);
2434 if (vno1->length == 2
2435 && commutative_tree_code (vno1->opcode)
2436 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2438 tree temp = vno1->op[0];
2439 vno1->op[0] = vno1->op[1];
2440 vno1->op[1] = temp;
2443 hstate.add_int (vno1->opcode);
2444 for (i = 0; i < vno1->length; ++i)
2445 inchash::add_expr (vno1->op[i], hstate);
2447 return hstate.end ();
2450 /* Compare nary operations VNO1 and VNO2 and return true if they are
2451 equivalent. */
2453 bool
2454 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2456 unsigned i;
2458 if (vno1->hashcode != vno2->hashcode)
2459 return false;
2461 if (vno1->length != vno2->length)
2462 return false;
2464 if (vno1->opcode != vno2->opcode
2465 || !types_compatible_p (vno1->type, vno2->type))
2466 return false;
2468 for (i = 0; i < vno1->length; ++i)
2469 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2470 return false;
2472 return true;
2475 /* Initialize VNO from the pieces provided. */
2477 static void
2478 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2479 enum tree_code code, tree type, tree *ops)
2481 vno->opcode = code;
2482 vno->length = length;
2483 vno->type = type;
2484 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2487 /* Initialize VNO from OP. */
2489 static void
2490 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2492 unsigned i;
2494 vno->opcode = TREE_CODE (op);
2495 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2496 vno->type = TREE_TYPE (op);
2497 for (i = 0; i < vno->length; ++i)
2498 vno->op[i] = TREE_OPERAND (op, i);
2501 /* Return the number of operands for a vn_nary ops structure from STMT. */
2503 static unsigned int
2504 vn_nary_length_from_stmt (gimple stmt)
2506 switch (gimple_assign_rhs_code (stmt))
2508 case REALPART_EXPR:
2509 case IMAGPART_EXPR:
2510 case VIEW_CONVERT_EXPR:
2511 return 1;
2513 case BIT_FIELD_REF:
2514 return 3;
2516 case CONSTRUCTOR:
2517 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2519 default:
2520 return gimple_num_ops (stmt) - 1;
2524 /* Initialize VNO from STMT. */
2526 static void
2527 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2529 unsigned i;
2531 vno->opcode = gimple_assign_rhs_code (stmt);
2532 vno->type = gimple_expr_type (stmt);
2533 switch (vno->opcode)
2535 case REALPART_EXPR:
2536 case IMAGPART_EXPR:
2537 case VIEW_CONVERT_EXPR:
2538 vno->length = 1;
2539 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2540 break;
2542 case BIT_FIELD_REF:
2543 vno->length = 3;
2544 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2545 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2546 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2547 break;
2549 case CONSTRUCTOR:
2550 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2551 for (i = 0; i < vno->length; ++i)
2552 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2553 break;
2555 default:
2556 gcc_checking_assert (!gimple_assign_single_p (stmt));
2557 vno->length = gimple_num_ops (stmt) - 1;
2558 for (i = 0; i < vno->length; ++i)
2559 vno->op[i] = gimple_op (stmt, i + 1);
2563 /* Compute the hashcode for VNO and look for it in the hash table;
2564 return the resulting value number if it exists in the hash table.
2565 Return NULL_TREE if it does not exist in the hash table or if the
2566 result field of the operation is NULL. VNRESULT will contain the
2567 vn_nary_op_t from the hashtable if it exists. */
2569 static tree
2570 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2572 vn_nary_op_s **slot;
2574 if (vnresult)
2575 *vnresult = NULL;
2577 vno->hashcode = vn_nary_op_compute_hash (vno);
2578 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2579 NO_INSERT);
2580 if (!slot && current_info == optimistic_info)
2581 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2582 NO_INSERT);
2583 if (!slot)
2584 return NULL_TREE;
2585 if (vnresult)
2586 *vnresult = *slot;
2587 return (*slot)->result;
2590 /* Lookup a n-ary operation by its pieces and return the resulting value
2591 number if it exists in the hash table. Return NULL_TREE if it does
2592 not exist in the hash table or if the result field of the operation
2593 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2594 if it exists. */
2596 tree
2597 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2598 tree type, tree *ops, vn_nary_op_t *vnresult)
2600 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2601 sizeof_vn_nary_op (length));
2602 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2603 return vn_nary_op_lookup_1 (vno1, vnresult);
2606 /* Lookup OP in the current hash table, and return the resulting value
2607 number if it exists in the hash table. Return NULL_TREE if it does
2608 not exist in the hash table or if the result field of the operation
2609 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2610 if it exists. */
2612 tree
2613 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2615 vn_nary_op_t vno1
2616 = XALLOCAVAR (struct vn_nary_op_s,
2617 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2618 init_vn_nary_op_from_op (vno1, op);
2619 return vn_nary_op_lookup_1 (vno1, vnresult);
2622 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2623 value number if it exists in the hash table. Return NULL_TREE if
2624 it does not exist in the hash table. VNRESULT will contain the
2625 vn_nary_op_t from the hashtable if it exists. */
2627 tree
2628 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2630 vn_nary_op_t vno1
2631 = XALLOCAVAR (struct vn_nary_op_s,
2632 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2633 init_vn_nary_op_from_stmt (vno1, stmt);
2634 return vn_nary_op_lookup_1 (vno1, vnresult);
2637 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2639 static vn_nary_op_t
2640 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2642 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2645 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2646 obstack. */
2648 static vn_nary_op_t
2649 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2651 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2652 &current_info->nary_obstack);
2654 vno1->value_id = value_id;
2655 vno1->length = length;
2656 vno1->result = result;
2658 return vno1;
2661 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2662 VNO->HASHCODE first. */
2664 static vn_nary_op_t
2665 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2666 bool compute_hash)
2668 vn_nary_op_s **slot;
2670 if (compute_hash)
2671 vno->hashcode = vn_nary_op_compute_hash (vno);
2673 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2674 gcc_assert (!*slot);
2676 *slot = vno;
2677 return vno;
2680 /* Insert a n-ary operation into the current hash table using it's
2681 pieces. Return the vn_nary_op_t structure we created and put in
2682 the hashtable. */
2684 vn_nary_op_t
2685 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2686 tree type, tree *ops,
2687 tree result, unsigned int value_id)
2689 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2690 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2691 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2694 /* Insert OP into the current hash table with a value number of
2695 RESULT. Return the vn_nary_op_t structure we created and put in
2696 the hashtable. */
2698 vn_nary_op_t
2699 vn_nary_op_insert (tree op, tree result)
2701 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2702 vn_nary_op_t vno1;
2704 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2705 init_vn_nary_op_from_op (vno1, op);
2706 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2709 /* Insert the rhs of STMT into the current hash table with a value number of
2710 RESULT. */
2712 vn_nary_op_t
2713 vn_nary_op_insert_stmt (gimple stmt, tree result)
2715 vn_nary_op_t vno1
2716 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2717 result, VN_INFO (result)->value_id);
2718 init_vn_nary_op_from_stmt (vno1, stmt);
2719 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2722 /* Compute a hashcode for PHI operation VP1 and return it. */
2724 static inline hashval_t
2725 vn_phi_compute_hash (vn_phi_t vp1)
2727 inchash::hash hstate (vp1->block->index);
2728 int i;
2729 tree phi1op;
2730 tree type;
2732 /* If all PHI arguments are constants we need to distinguish
2733 the PHI node via its type. */
2734 type = vp1->type;
2735 hstate.merge_hash (vn_hash_type (type));
2737 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2739 if (phi1op == VN_TOP)
2740 continue;
2741 inchash::add_expr (phi1op, hstate);
2744 return hstate.end ();
2747 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2749 static int
2750 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2752 if (vp1->hashcode != vp2->hashcode)
2753 return false;
2755 if (vp1->block == vp2->block)
2757 int i;
2758 tree phi1op;
2760 /* If the PHI nodes do not have compatible types
2761 they are not the same. */
2762 if (!types_compatible_p (vp1->type, vp2->type))
2763 return false;
2765 /* Any phi in the same block will have it's arguments in the
2766 same edge order, because of how we store phi nodes. */
2767 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2769 tree phi2op = vp2->phiargs[i];
2770 if (phi1op == VN_TOP || phi2op == VN_TOP)
2771 continue;
2772 if (!expressions_equal_p (phi1op, phi2op))
2773 return false;
2775 return true;
2777 return false;
2780 static vec<tree> shared_lookup_phiargs;
2782 /* Lookup PHI in the current hash table, and return the resulting
2783 value number if it exists in the hash table. Return NULL_TREE if
2784 it does not exist in the hash table. */
2786 static tree
2787 vn_phi_lookup (gimple phi)
2789 vn_phi_s **slot;
2790 struct vn_phi_s vp1;
2791 unsigned i;
2793 shared_lookup_phiargs.truncate (0);
2795 /* Canonicalize the SSA_NAME's to their value number. */
2796 for (i = 0; i < gimple_phi_num_args (phi); i++)
2798 tree def = PHI_ARG_DEF (phi, i);
2799 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2800 shared_lookup_phiargs.safe_push (def);
2802 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2803 vp1.phiargs = shared_lookup_phiargs;
2804 vp1.block = gimple_bb (phi);
2805 vp1.hashcode = vn_phi_compute_hash (&vp1);
2806 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2807 NO_INSERT);
2808 if (!slot && current_info == optimistic_info)
2809 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2810 NO_INSERT);
2811 if (!slot)
2812 return NULL_TREE;
2813 return (*slot)->result;
2816 /* Insert PHI into the current hash table with a value number of
2817 RESULT. */
2819 static vn_phi_t
2820 vn_phi_insert (gimple phi, tree result)
2822 vn_phi_s **slot;
2823 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2824 unsigned i;
2825 vec<tree> args = vNULL;
2827 /* Canonicalize the SSA_NAME's to their value number. */
2828 for (i = 0; i < gimple_phi_num_args (phi); i++)
2830 tree def = PHI_ARG_DEF (phi, i);
2831 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2832 args.safe_push (def);
2834 vp1->value_id = VN_INFO (result)->value_id;
2835 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2836 vp1->phiargs = args;
2837 vp1->block = gimple_bb (phi);
2838 vp1->result = result;
2839 vp1->hashcode = vn_phi_compute_hash (vp1);
2841 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2843 /* Because we iterate over phi operations more than once, it's
2844 possible the slot might already exist here, hence no assert.*/
2845 *slot = vp1;
2846 return vp1;
2850 /* Print set of components in strongly connected component SCC to OUT. */
2852 static void
2853 print_scc (FILE *out, vec<tree> scc)
2855 tree var;
2856 unsigned int i;
2858 fprintf (out, "SCC consists of:");
2859 FOR_EACH_VEC_ELT (scc, i, var)
2861 fprintf (out, " ");
2862 print_generic_expr (out, var, 0);
2864 fprintf (out, "\n");
2867 /* Set the value number of FROM to TO, return true if it has changed
2868 as a result. */
2870 static inline bool
2871 set_ssa_val_to (tree from, tree to)
2873 tree currval = SSA_VAL (from);
2874 HOST_WIDE_INT toff, coff;
2876 /* The only thing we allow as value numbers are ssa_names
2877 and invariants. So assert that here. We don't allow VN_TOP
2878 as visiting a stmt should produce a value-number other than
2879 that.
2880 ??? Still VN_TOP can happen for unreachable code, so force
2881 it to varying in that case. Not all code is prepared to
2882 get VN_TOP on valueization. */
2883 if (to == VN_TOP)
2885 if (dump_file && (dump_flags & TDF_DETAILS))
2886 fprintf (dump_file, "Forcing value number to varying on "
2887 "receiving VN_TOP\n");
2888 to = from;
2891 gcc_assert (to != NULL_TREE
2892 && ((TREE_CODE (to) == SSA_NAME
2893 && (to == from || SSA_VAL (to) == to))
2894 || is_gimple_min_invariant (to)));
2896 if (from != to)
2898 if (currval == from)
2900 if (dump_file && (dump_flags & TDF_DETAILS))
2902 fprintf (dump_file, "Not changing value number of ");
2903 print_generic_expr (dump_file, from, 0);
2904 fprintf (dump_file, " from VARYING to ");
2905 print_generic_expr (dump_file, to, 0);
2906 fprintf (dump_file, "\n");
2908 return false;
2910 else if (TREE_CODE (to) == SSA_NAME
2911 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2912 to = from;
2915 if (dump_file && (dump_flags & TDF_DETAILS))
2917 fprintf (dump_file, "Setting value number of ");
2918 print_generic_expr (dump_file, from, 0);
2919 fprintf (dump_file, " to ");
2920 print_generic_expr (dump_file, to, 0);
2923 if (currval != to
2924 && !operand_equal_p (currval, to, 0)
2925 /* ??? For addresses involving volatile objects or types operand_equal_p
2926 does not reliably detect ADDR_EXPRs as equal. We know we are only
2927 getting invariant gimple addresses here, so can use
2928 get_addr_base_and_unit_offset to do this comparison. */
2929 && !(TREE_CODE (currval) == ADDR_EXPR
2930 && TREE_CODE (to) == ADDR_EXPR
2931 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2932 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2933 && coff == toff))
2935 VN_INFO (from)->valnum = to;
2936 if (dump_file && (dump_flags & TDF_DETAILS))
2937 fprintf (dump_file, " (changed)\n");
2938 return true;
2940 if (dump_file && (dump_flags & TDF_DETAILS))
2941 fprintf (dump_file, "\n");
2942 return false;
2945 /* Mark as processed all the definitions in the defining stmt of USE, or
2946 the USE itself. */
2948 static void
2949 mark_use_processed (tree use)
2951 ssa_op_iter iter;
2952 def_operand_p defp;
2953 gimple stmt = SSA_NAME_DEF_STMT (use);
2955 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2957 VN_INFO (use)->use_processed = true;
2958 return;
2961 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2963 tree def = DEF_FROM_PTR (defp);
2965 VN_INFO (def)->use_processed = true;
2969 /* Set all definitions in STMT to value number to themselves.
2970 Return true if a value number changed. */
2972 static bool
2973 defs_to_varying (gimple stmt)
2975 bool changed = false;
2976 ssa_op_iter iter;
2977 def_operand_p defp;
2979 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2981 tree def = DEF_FROM_PTR (defp);
2982 changed |= set_ssa_val_to (def, def);
2984 return changed;
2987 static bool expr_has_constants (tree expr);
2989 /* Visit a copy between LHS and RHS, return true if the value number
2990 changed. */
2992 static bool
2993 visit_copy (tree lhs, tree rhs)
2995 /* The copy may have a more interesting constant filled expression
2996 (we don't, since we know our RHS is just an SSA name). */
2997 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2998 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
3000 /* And finally valueize. */
3001 rhs = SSA_VAL (rhs);
3003 return set_ssa_val_to (lhs, rhs);
3006 /* Visit a nary operator RHS, value number it, and return true if the
3007 value number of LHS has changed as a result. */
3009 static bool
3010 visit_nary_op (tree lhs, gimple stmt)
3012 bool changed = false;
3013 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3015 if (result)
3016 changed = set_ssa_val_to (lhs, result);
3017 else
3019 changed = set_ssa_val_to (lhs, lhs);
3020 vn_nary_op_insert_stmt (stmt, lhs);
3023 return changed;
3026 /* Visit a call STMT storing into LHS. Return true if the value number
3027 of the LHS has changed as a result. */
3029 static bool
3030 visit_reference_op_call (tree lhs, gcall *stmt)
3032 bool changed = false;
3033 struct vn_reference_s vr1;
3034 vn_reference_t vnresult = NULL;
3035 tree vdef = gimple_vdef (stmt);
3037 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3038 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3039 lhs = NULL_TREE;
3041 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3042 if (vnresult)
3044 if (vnresult->result_vdef && vdef)
3045 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3047 if (!vnresult->result && lhs)
3048 vnresult->result = lhs;
3050 if (vnresult->result && lhs)
3052 changed |= set_ssa_val_to (lhs, vnresult->result);
3054 if (VN_INFO (vnresult->result)->has_constants)
3055 VN_INFO (lhs)->has_constants = true;
3058 else
3060 vn_reference_t vr2;
3061 vn_reference_s **slot;
3062 if (vdef)
3063 changed |= set_ssa_val_to (vdef, vdef);
3064 if (lhs)
3065 changed |= set_ssa_val_to (lhs, lhs);
3066 vr2 = current_info->references_pool->allocate ();
3067 vr2->vuse = vr1.vuse;
3068 /* As we are not walking the virtual operand chain we know the
3069 shared_lookup_references are still original so we can re-use
3070 them here. */
3071 vr2->operands = vr1.operands.copy ();
3072 vr2->type = vr1.type;
3073 vr2->set = vr1.set;
3074 vr2->hashcode = vr1.hashcode;
3075 vr2->result = lhs;
3076 vr2->result_vdef = vdef;
3077 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3078 INSERT);
3079 gcc_assert (!*slot);
3080 *slot = vr2;
3083 return changed;
3086 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3087 and return true if the value number of the LHS has changed as a result. */
3089 static bool
3090 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3092 bool changed = false;
3093 tree last_vuse;
3094 tree result;
3096 last_vuse = gimple_vuse (stmt);
3097 last_vuse_ptr = &last_vuse;
3098 result = vn_reference_lookup (op, gimple_vuse (stmt),
3099 default_vn_walk_kind, NULL);
3100 last_vuse_ptr = NULL;
3102 /* We handle type-punning through unions by value-numbering based
3103 on offset and size of the access. Be prepared to handle a
3104 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3105 if (result
3106 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3108 /* We will be setting the value number of lhs to the value number
3109 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3110 So first simplify and lookup this expression to see if it
3111 is already available. */
3112 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3113 if ((CONVERT_EXPR_P (val)
3114 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3115 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3117 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3118 if ((CONVERT_EXPR_P (tem)
3119 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3120 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3121 TREE_TYPE (val), tem)))
3122 val = tem;
3124 result = val;
3125 if (!is_gimple_min_invariant (val)
3126 && TREE_CODE (val) != SSA_NAME)
3127 result = vn_nary_op_lookup (val, NULL);
3128 /* If the expression is not yet available, value-number lhs to
3129 a new SSA_NAME we create. */
3130 if (!result)
3132 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3133 "vntemp");
3134 /* Initialize value-number information properly. */
3135 VN_INFO_GET (result)->valnum = result;
3136 VN_INFO (result)->value_id = get_next_value_id ();
3137 VN_INFO (result)->expr = val;
3138 VN_INFO (result)->has_constants = expr_has_constants (val);
3139 VN_INFO (result)->needs_insertion = true;
3140 /* As all "inserted" statements are singleton SCCs, insert
3141 to the valid table. This is strictly needed to
3142 avoid re-generating new value SSA_NAMEs for the same
3143 expression during SCC iteration over and over (the
3144 optimistic table gets cleared after each iteration).
3145 We do not need to insert into the optimistic table, as
3146 lookups there will fall back to the valid table. */
3147 if (current_info == optimistic_info)
3149 current_info = valid_info;
3150 vn_nary_op_insert (val, result);
3151 current_info = optimistic_info;
3153 else
3154 vn_nary_op_insert (val, result);
3155 if (dump_file && (dump_flags & TDF_DETAILS))
3157 fprintf (dump_file, "Inserting name ");
3158 print_generic_expr (dump_file, result, 0);
3159 fprintf (dump_file, " for expression ");
3160 print_generic_expr (dump_file, val, 0);
3161 fprintf (dump_file, "\n");
3166 if (result)
3168 changed = set_ssa_val_to (lhs, result);
3169 if (TREE_CODE (result) == SSA_NAME
3170 && VN_INFO (result)->has_constants)
3172 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3173 VN_INFO (lhs)->has_constants = true;
3176 else
3178 changed = set_ssa_val_to (lhs, lhs);
3179 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3182 return changed;
3186 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3187 and return true if the value number of the LHS has changed as a result. */
3189 static bool
3190 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3192 bool changed = false;
3193 vn_reference_t vnresult = NULL;
3194 tree result, assign;
3195 bool resultsame = false;
3196 tree vuse = gimple_vuse (stmt);
3197 tree vdef = gimple_vdef (stmt);
3199 if (TREE_CODE (op) == SSA_NAME)
3200 op = SSA_VAL (op);
3202 /* First we want to lookup using the *vuses* from the store and see
3203 if there the last store to this location with the same address
3204 had the same value.
3206 The vuses represent the memory state before the store. If the
3207 memory state, address, and value of the store is the same as the
3208 last store to this location, then this store will produce the
3209 same memory state as that store.
3211 In this case the vdef versions for this store are value numbered to those
3212 vuse versions, since they represent the same memory state after
3213 this store.
3215 Otherwise, the vdefs for the store are used when inserting into
3216 the table, since the store generates a new memory state. */
3218 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3220 if (result)
3222 if (TREE_CODE (result) == SSA_NAME)
3223 result = SSA_VAL (result);
3224 resultsame = expressions_equal_p (result, op);
3227 if ((!result || !resultsame)
3228 /* Only perform the following when being called from PRE
3229 which embeds tail merging. */
3230 && default_vn_walk_kind == VN_WALK)
3232 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3233 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3234 if (vnresult)
3236 VN_INFO (vdef)->use_processed = true;
3237 return set_ssa_val_to (vdef, vnresult->result_vdef);
3241 if (!result || !resultsame)
3243 if (dump_file && (dump_flags & TDF_DETAILS))
3245 fprintf (dump_file, "No store match\n");
3246 fprintf (dump_file, "Value numbering store ");
3247 print_generic_expr (dump_file, lhs, 0);
3248 fprintf (dump_file, " to ");
3249 print_generic_expr (dump_file, op, 0);
3250 fprintf (dump_file, "\n");
3252 /* Have to set value numbers before insert, since insert is
3253 going to valueize the references in-place. */
3254 if (vdef)
3256 changed |= set_ssa_val_to (vdef, vdef);
3259 /* Do not insert structure copies into the tables. */
3260 if (is_gimple_min_invariant (op)
3261 || is_gimple_reg (op))
3262 vn_reference_insert (lhs, op, vdef, NULL);
3264 /* Only perform the following when being called from PRE
3265 which embeds tail merging. */
3266 if (default_vn_walk_kind == VN_WALK)
3268 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3269 vn_reference_insert (assign, lhs, vuse, vdef);
3272 else
3274 /* We had a match, so value number the vdef to have the value
3275 number of the vuse it came from. */
3277 if (dump_file && (dump_flags & TDF_DETAILS))
3278 fprintf (dump_file, "Store matched earlier value,"
3279 "value numbering store vdefs to matching vuses.\n");
3281 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3284 return changed;
3287 /* Visit and value number PHI, return true if the value number
3288 changed. */
3290 static bool
3291 visit_phi (gimple phi)
3293 bool changed = false;
3294 tree result;
3295 tree sameval = VN_TOP;
3296 bool allsame = true;
3298 /* TODO: We could check for this in init_sccvn, and replace this
3299 with a gcc_assert. */
3300 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3301 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3303 /* See if all non-TOP arguments have the same value. TOP is
3304 equivalent to everything, so we can ignore it. */
3305 edge_iterator ei;
3306 edge e;
3307 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3308 if (e->flags & EDGE_EXECUTABLE)
3310 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3312 if (TREE_CODE (def) == SSA_NAME)
3313 def = SSA_VAL (def);
3314 if (def == VN_TOP)
3315 continue;
3316 if (sameval == VN_TOP)
3318 sameval = def;
3320 else
3322 if (!expressions_equal_p (def, sameval))
3324 allsame = false;
3325 break;
3330 /* If all value numbered to the same value, the phi node has that
3331 value. */
3332 if (allsame)
3333 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3335 /* Otherwise, see if it is equivalent to a phi node in this block. */
3336 result = vn_phi_lookup (phi);
3337 if (result)
3338 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3339 else
3341 vn_phi_insert (phi, PHI_RESULT (phi));
3342 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3343 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3344 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3347 return changed;
3350 /* Return true if EXPR contains constants. */
3352 static bool
3353 expr_has_constants (tree expr)
3355 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3357 case tcc_unary:
3358 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3360 case tcc_binary:
3361 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3362 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3363 /* Constants inside reference ops are rarely interesting, but
3364 it can take a lot of looking to find them. */
3365 case tcc_reference:
3366 case tcc_declaration:
3367 return false;
3368 default:
3369 return is_gimple_min_invariant (expr);
3371 return false;
3374 /* Return true if STMT contains constants. */
3376 static bool
3377 stmt_has_constants (gimple stmt)
3379 tree tem;
3381 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3382 return false;
3384 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3386 case GIMPLE_TERNARY_RHS:
3387 tem = gimple_assign_rhs3 (stmt);
3388 if (TREE_CODE (tem) == SSA_NAME)
3389 tem = SSA_VAL (tem);
3390 if (is_gimple_min_invariant (tem))
3391 return true;
3392 /* Fallthru. */
3394 case GIMPLE_BINARY_RHS:
3395 tem = gimple_assign_rhs2 (stmt);
3396 if (TREE_CODE (tem) == SSA_NAME)
3397 tem = SSA_VAL (tem);
3398 if (is_gimple_min_invariant (tem))
3399 return true;
3400 /* Fallthru. */
3402 case GIMPLE_SINGLE_RHS:
3403 /* Constants inside reference ops are rarely interesting, but
3404 it can take a lot of looking to find them. */
3405 case GIMPLE_UNARY_RHS:
3406 tem = gimple_assign_rhs1 (stmt);
3407 if (TREE_CODE (tem) == SSA_NAME)
3408 tem = SSA_VAL (tem);
3409 return is_gimple_min_invariant (tem);
3411 default:
3412 gcc_unreachable ();
3414 return false;
3417 /* Simplify the binary expression RHS, and return the result if
3418 simplified. */
3420 static tree
3421 simplify_binary_expression (gimple stmt)
3423 tree result = NULL_TREE;
3424 tree op0 = gimple_assign_rhs1 (stmt);
3425 tree op1 = gimple_assign_rhs2 (stmt);
3426 enum tree_code code = gimple_assign_rhs_code (stmt);
3428 /* This will not catch every single case we could combine, but will
3429 catch those with constants. The goal here is to simultaneously
3430 combine constants between expressions, but avoid infinite
3431 expansion of expressions during simplification. */
3432 op0 = vn_valueize (op0);
3433 if (TREE_CODE (op0) == SSA_NAME
3434 && (VN_INFO (op0)->has_constants
3435 || TREE_CODE_CLASS (code) == tcc_comparison
3436 || code == COMPLEX_EXPR))
3437 op0 = vn_get_expr_for (op0);
3439 op1 = vn_valueize (op1);
3440 if (TREE_CODE (op1) == SSA_NAME
3441 && (VN_INFO (op1)->has_constants
3442 || code == COMPLEX_EXPR))
3443 op1 = vn_get_expr_for (op1);
3445 /* Pointer plus constant can be represented as invariant address.
3446 Do so to allow further propatation, see also tree forwprop. */
3447 if (code == POINTER_PLUS_EXPR
3448 && tree_fits_uhwi_p (op1)
3449 && TREE_CODE (op0) == ADDR_EXPR
3450 && is_gimple_min_invariant (op0))
3451 return build_invariant_address (TREE_TYPE (op0),
3452 TREE_OPERAND (op0, 0),
3453 tree_to_uhwi (op1));
3455 /* Avoid folding if nothing changed. */
3456 if (op0 == gimple_assign_rhs1 (stmt)
3457 && op1 == gimple_assign_rhs2 (stmt))
3458 return NULL_TREE;
3460 fold_defer_overflow_warnings ();
3462 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3463 if (result)
3464 STRIP_USELESS_TYPE_CONVERSION (result);
3466 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3467 stmt, 0);
3469 /* Make sure result is not a complex expression consisting
3470 of operators of operators (IE (a + b) + (a + c))
3471 Otherwise, we will end up with unbounded expressions if
3472 fold does anything at all. */
3473 if (result && valid_gimple_rhs_p (result))
3474 return result;
3476 return NULL_TREE;
3479 /* Simplify the unary expression RHS, and return the result if
3480 simplified. */
3482 static tree
3483 simplify_unary_expression (gassign *stmt)
3485 tree result = NULL_TREE;
3486 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3487 enum tree_code code = gimple_assign_rhs_code (stmt);
3489 /* We handle some tcc_reference codes here that are all
3490 GIMPLE_ASSIGN_SINGLE codes. */
3491 if (code == REALPART_EXPR
3492 || code == IMAGPART_EXPR
3493 || code == VIEW_CONVERT_EXPR
3494 || code == BIT_FIELD_REF)
3495 op0 = TREE_OPERAND (op0, 0);
3497 orig_op0 = op0;
3498 op0 = vn_valueize (op0);
3499 if (TREE_CODE (op0) == SSA_NAME)
3501 if (VN_INFO (op0)->has_constants)
3502 op0 = vn_get_expr_for (op0);
3503 else if (CONVERT_EXPR_CODE_P (code)
3504 || code == REALPART_EXPR
3505 || code == IMAGPART_EXPR
3506 || code == VIEW_CONVERT_EXPR
3507 || code == BIT_FIELD_REF)
3509 /* We want to do tree-combining on conversion-like expressions.
3510 Make sure we feed only SSA_NAMEs or constants to fold though. */
3511 tree tem = vn_get_expr_for (op0);
3512 if (UNARY_CLASS_P (tem)
3513 || BINARY_CLASS_P (tem)
3514 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3515 || TREE_CODE (tem) == SSA_NAME
3516 || TREE_CODE (tem) == CONSTRUCTOR
3517 || is_gimple_min_invariant (tem))
3518 op0 = tem;
3522 /* Avoid folding if nothing changed, but remember the expression. */
3523 if (op0 == orig_op0)
3524 return NULL_TREE;
3526 if (code == BIT_FIELD_REF)
3528 tree rhs = gimple_assign_rhs1 (stmt);
3529 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3530 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3532 else
3533 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3534 if (result)
3536 STRIP_USELESS_TYPE_CONVERSION (result);
3537 if (valid_gimple_rhs_p (result))
3538 return result;
3541 return NULL_TREE;
3544 /* Try to simplify RHS using equivalences and constant folding. */
3546 static tree
3547 try_to_simplify (gassign *stmt)
3549 enum tree_code code = gimple_assign_rhs_code (stmt);
3550 tree tem;
3552 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3553 in this case, there is no point in doing extra work. */
3554 if (code == SSA_NAME)
3555 return NULL_TREE;
3557 /* First try constant folding based on our current lattice. */
3558 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3559 if (tem
3560 && (TREE_CODE (tem) == SSA_NAME
3561 || is_gimple_min_invariant (tem)))
3562 return tem;
3564 /* If that didn't work try combining multiple statements. */
3565 switch (TREE_CODE_CLASS (code))
3567 case tcc_reference:
3568 /* Fallthrough for some unary codes that can operate on registers. */
3569 if (!(code == REALPART_EXPR
3570 || code == IMAGPART_EXPR
3571 || code == VIEW_CONVERT_EXPR
3572 || code == BIT_FIELD_REF))
3573 break;
3574 /* We could do a little more with unary ops, if they expand
3575 into binary ops, but it's debatable whether it is worth it. */
3576 case tcc_unary:
3577 return simplify_unary_expression (stmt);
3579 case tcc_comparison:
3580 case tcc_binary:
3581 return simplify_binary_expression (stmt);
3583 default:
3584 break;
3587 return NULL_TREE;
3590 /* Visit and value number USE, return true if the value number
3591 changed. */
3593 static bool
3594 visit_use (tree use)
3596 bool changed = false;
3597 gimple stmt = SSA_NAME_DEF_STMT (use);
3599 mark_use_processed (use);
3601 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3602 if (dump_file && (dump_flags & TDF_DETAILS)
3603 && !SSA_NAME_IS_DEFAULT_DEF (use))
3605 fprintf (dump_file, "Value numbering ");
3606 print_generic_expr (dump_file, use, 0);
3607 fprintf (dump_file, " stmt = ");
3608 print_gimple_stmt (dump_file, stmt, 0, 0);
3611 /* Handle uninitialized uses. */
3612 if (SSA_NAME_IS_DEFAULT_DEF (use))
3613 changed = set_ssa_val_to (use, use);
3614 else
3616 if (gimple_code (stmt) == GIMPLE_PHI)
3617 changed = visit_phi (stmt);
3618 else if (gimple_has_volatile_ops (stmt))
3619 changed = defs_to_varying (stmt);
3620 else if (is_gimple_assign (stmt))
3622 enum tree_code code = gimple_assign_rhs_code (stmt);
3623 tree lhs = gimple_assign_lhs (stmt);
3624 tree rhs1 = gimple_assign_rhs1 (stmt);
3625 tree simplified;
3627 /* Shortcut for copies. Simplifying copies is pointless,
3628 since we copy the expression and value they represent. */
3629 if (code == SSA_NAME
3630 && TREE_CODE (lhs) == SSA_NAME)
3632 changed = visit_copy (lhs, rhs1);
3633 goto done;
3635 simplified = try_to_simplify (as_a <gassign *> (stmt));
3636 if (simplified)
3638 if (dump_file && (dump_flags & TDF_DETAILS))
3640 fprintf (dump_file, "RHS ");
3641 print_gimple_expr (dump_file, stmt, 0, 0);
3642 fprintf (dump_file, " simplified to ");
3643 print_generic_expr (dump_file, simplified, 0);
3644 if (TREE_CODE (lhs) == SSA_NAME)
3645 fprintf (dump_file, " has constants %d\n",
3646 expr_has_constants (simplified));
3647 else
3648 fprintf (dump_file, "\n");
3651 /* Setting value numbers to constants will occasionally
3652 screw up phi congruence because constants are not
3653 uniquely associated with a single ssa name that can be
3654 looked up. */
3655 if (simplified
3656 && is_gimple_min_invariant (simplified)
3657 && TREE_CODE (lhs) == SSA_NAME)
3659 VN_INFO (lhs)->expr = simplified;
3660 VN_INFO (lhs)->has_constants = true;
3661 changed = set_ssa_val_to (lhs, simplified);
3662 goto done;
3664 else if (simplified
3665 && TREE_CODE (simplified) == SSA_NAME
3666 && TREE_CODE (lhs) == SSA_NAME)
3668 changed = visit_copy (lhs, simplified);
3669 goto done;
3671 else if (simplified)
3673 if (TREE_CODE (lhs) == SSA_NAME)
3675 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3676 /* We have to unshare the expression or else
3677 valuizing may change the IL stream. */
3678 VN_INFO (lhs)->expr = unshare_expr (simplified);
3681 else if (stmt_has_constants (stmt)
3682 && TREE_CODE (lhs) == SSA_NAME)
3683 VN_INFO (lhs)->has_constants = true;
3684 else if (TREE_CODE (lhs) == SSA_NAME)
3686 /* We reset expr and constantness here because we may
3687 have been value numbering optimistically, and
3688 iterating. They may become non-constant in this case,
3689 even if they were optimistically constant. */
3691 VN_INFO (lhs)->has_constants = false;
3692 VN_INFO (lhs)->expr = NULL_TREE;
3695 if ((TREE_CODE (lhs) == SSA_NAME
3696 /* We can substitute SSA_NAMEs that are live over
3697 abnormal edges with their constant value. */
3698 && !(gimple_assign_copy_p (stmt)
3699 && is_gimple_min_invariant (rhs1))
3700 && !(simplified
3701 && is_gimple_min_invariant (simplified))
3702 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3703 /* Stores or copies from SSA_NAMEs that are live over
3704 abnormal edges are a problem. */
3705 || (code == SSA_NAME
3706 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3707 changed = defs_to_varying (stmt);
3708 else if (REFERENCE_CLASS_P (lhs)
3709 || DECL_P (lhs))
3710 changed = visit_reference_op_store (lhs, rhs1, stmt);
3711 else if (TREE_CODE (lhs) == SSA_NAME)
3713 if ((gimple_assign_copy_p (stmt)
3714 && is_gimple_min_invariant (rhs1))
3715 || (simplified
3716 && is_gimple_min_invariant (simplified)))
3718 VN_INFO (lhs)->has_constants = true;
3719 if (simplified)
3720 changed = set_ssa_val_to (lhs, simplified);
3721 else
3722 changed = set_ssa_val_to (lhs, rhs1);
3724 else
3726 /* First try to lookup the simplified expression. */
3727 if (simplified)
3729 enum gimple_rhs_class rhs_class;
3732 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3733 if ((rhs_class == GIMPLE_UNARY_RHS
3734 || rhs_class == GIMPLE_BINARY_RHS
3735 || rhs_class == GIMPLE_TERNARY_RHS)
3736 && valid_gimple_rhs_p (simplified))
3738 tree result = vn_nary_op_lookup (simplified, NULL);
3739 if (result)
3741 changed = set_ssa_val_to (lhs, result);
3742 goto done;
3747 /* Otherwise visit the original statement. */
3748 switch (vn_get_stmt_kind (stmt))
3750 case VN_NARY:
3751 changed = visit_nary_op (lhs, stmt);
3752 break;
3753 case VN_REFERENCE:
3754 changed = visit_reference_op_load (lhs, rhs1, stmt);
3755 break;
3756 default:
3757 changed = defs_to_varying (stmt);
3758 break;
3762 else
3763 changed = defs_to_varying (stmt);
3765 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3767 tree lhs = gimple_call_lhs (stmt);
3768 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3770 /* Try constant folding based on our current lattice. */
3771 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3772 vn_valueize);
3773 if (simplified)
3775 if (dump_file && (dump_flags & TDF_DETAILS))
3777 fprintf (dump_file, "call ");
3778 print_gimple_expr (dump_file, stmt, 0, 0);
3779 fprintf (dump_file, " simplified to ");
3780 print_generic_expr (dump_file, simplified, 0);
3781 if (TREE_CODE (lhs) == SSA_NAME)
3782 fprintf (dump_file, " has constants %d\n",
3783 expr_has_constants (simplified));
3784 else
3785 fprintf (dump_file, "\n");
3788 /* Setting value numbers to constants will occasionally
3789 screw up phi congruence because constants are not
3790 uniquely associated with a single ssa name that can be
3791 looked up. */
3792 if (simplified
3793 && is_gimple_min_invariant (simplified))
3795 VN_INFO (lhs)->expr = simplified;
3796 VN_INFO (lhs)->has_constants = true;
3797 changed = set_ssa_val_to (lhs, simplified);
3798 if (gimple_vdef (stmt))
3799 changed |= set_ssa_val_to (gimple_vdef (stmt),
3800 SSA_VAL (gimple_vuse (stmt)));
3801 goto done;
3803 else if (simplified
3804 && TREE_CODE (simplified) == SSA_NAME)
3806 changed = visit_copy (lhs, simplified);
3807 if (gimple_vdef (stmt))
3808 changed |= set_ssa_val_to (gimple_vdef (stmt),
3809 SSA_VAL (gimple_vuse (stmt)));
3810 goto done;
3812 else
3814 if (stmt_has_constants (stmt))
3815 VN_INFO (lhs)->has_constants = true;
3816 else
3818 /* We reset expr and constantness here because we may
3819 have been value numbering optimistically, and
3820 iterating. They may become non-constant in this case,
3821 even if they were optimistically constant. */
3822 VN_INFO (lhs)->has_constants = false;
3823 VN_INFO (lhs)->expr = NULL_TREE;
3826 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3828 changed = defs_to_varying (stmt);
3829 goto done;
3834 if (!gimple_call_internal_p (stmt)
3835 && (/* Calls to the same function with the same vuse
3836 and the same operands do not necessarily return the same
3837 value, unless they're pure or const. */
3838 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3839 /* If calls have a vdef, subsequent calls won't have
3840 the same incoming vuse. So, if 2 calls with vdef have the
3841 same vuse, we know they're not subsequent.
3842 We can value number 2 calls to the same function with the
3843 same vuse and the same operands which are not subsequent
3844 the same, because there is no code in the program that can
3845 compare the 2 values... */
3846 || (gimple_vdef (stmt)
3847 /* ... unless the call returns a pointer which does
3848 not alias with anything else. In which case the
3849 information that the values are distinct are encoded
3850 in the IL. */
3851 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3852 /* Only perform the following when being called from PRE
3853 which embeds tail merging. */
3854 && default_vn_walk_kind == VN_WALK)))
3855 changed = visit_reference_op_call (lhs, call_stmt);
3856 else
3857 changed = defs_to_varying (stmt);
3859 else
3860 changed = defs_to_varying (stmt);
3862 done:
3863 return changed;
3866 /* Compare two operands by reverse postorder index */
3868 static int
3869 compare_ops (const void *pa, const void *pb)
3871 const tree opa = *((const tree *)pa);
3872 const tree opb = *((const tree *)pb);
3873 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3874 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3875 basic_block bba;
3876 basic_block bbb;
3878 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3879 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3880 else if (gimple_nop_p (opstmta))
3881 return -1;
3882 else if (gimple_nop_p (opstmtb))
3883 return 1;
3885 bba = gimple_bb (opstmta);
3886 bbb = gimple_bb (opstmtb);
3888 if (!bba && !bbb)
3889 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3890 else if (!bba)
3891 return -1;
3892 else if (!bbb)
3893 return 1;
3895 if (bba == bbb)
3897 if (gimple_code (opstmta) == GIMPLE_PHI
3898 && gimple_code (opstmtb) == GIMPLE_PHI)
3899 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3900 else if (gimple_code (opstmta) == GIMPLE_PHI)
3901 return -1;
3902 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3903 return 1;
3904 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3905 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3906 else
3907 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3909 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3912 /* Sort an array containing members of a strongly connected component
3913 SCC so that the members are ordered by RPO number.
3914 This means that when the sort is complete, iterating through the
3915 array will give you the members in RPO order. */
3917 static void
3918 sort_scc (vec<tree> scc)
3920 scc.qsort (compare_ops);
3923 /* Insert the no longer used nary ONARY to the hash INFO. */
3925 static void
3926 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3928 size_t size = sizeof_vn_nary_op (onary->length);
3929 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3930 &info->nary_obstack);
3931 memcpy (nary, onary, size);
3932 vn_nary_op_insert_into (nary, info->nary, false);
3935 /* Insert the no longer used phi OPHI to the hash INFO. */
3937 static void
3938 copy_phi (vn_phi_t ophi, vn_tables_t info)
3940 vn_phi_t phi = info->phis_pool->allocate ();
3941 vn_phi_s **slot;
3942 memcpy (phi, ophi, sizeof (*phi));
3943 ophi->phiargs.create (0);
3944 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3945 gcc_assert (!*slot);
3946 *slot = phi;
3949 /* Insert the no longer used reference OREF to the hash INFO. */
3951 static void
3952 copy_reference (vn_reference_t oref, vn_tables_t info)
3954 vn_reference_t ref;
3955 vn_reference_s **slot;
3956 ref = info->references_pool->allocate ();
3957 memcpy (ref, oref, sizeof (*ref));
3958 oref->operands.create (0);
3959 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3960 if (*slot)
3961 free_reference (*slot);
3962 *slot = ref;
3965 /* Process a strongly connected component in the SSA graph. */
3967 static void
3968 process_scc (vec<tree> scc)
3970 tree var;
3971 unsigned int i;
3972 unsigned int iterations = 0;
3973 bool changed = true;
3974 vn_nary_op_iterator_type hin;
3975 vn_phi_iterator_type hip;
3976 vn_reference_iterator_type hir;
3977 vn_nary_op_t nary;
3978 vn_phi_t phi;
3979 vn_reference_t ref;
3981 /* If the SCC has a single member, just visit it. */
3982 if (scc.length () == 1)
3984 tree use = scc[0];
3985 if (VN_INFO (use)->use_processed)
3986 return;
3987 /* We need to make sure it doesn't form a cycle itself, which can
3988 happen for self-referential PHI nodes. In that case we would
3989 end up inserting an expression with VN_TOP operands into the
3990 valid table which makes us derive bogus equivalences later.
3991 The cheapest way to check this is to assume it for all PHI nodes. */
3992 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3993 /* Fallthru to iteration. */ ;
3994 else
3996 visit_use (use);
3997 return;
4001 if (dump_file && (dump_flags & TDF_DETAILS))
4002 print_scc (dump_file, scc);
4004 /* Iterate over the SCC with the optimistic table until it stops
4005 changing. */
4006 current_info = optimistic_info;
4007 while (changed)
4009 changed = false;
4010 iterations++;
4011 if (dump_file && (dump_flags & TDF_DETAILS))
4012 fprintf (dump_file, "Starting iteration %d\n", iterations);
4013 /* As we are value-numbering optimistically we have to
4014 clear the expression tables and the simplified expressions
4015 in each iteration until we converge. */
4016 optimistic_info->nary->empty ();
4017 optimistic_info->phis->empty ();
4018 optimistic_info->references->empty ();
4019 obstack_free (&optimistic_info->nary_obstack, NULL);
4020 gcc_obstack_init (&optimistic_info->nary_obstack);
4021 optimistic_info->phis_pool->release ();
4022 optimistic_info->references_pool->release ();
4023 FOR_EACH_VEC_ELT (scc, i, var)
4024 VN_INFO (var)->expr = NULL_TREE;
4025 FOR_EACH_VEC_ELT (scc, i, var)
4026 changed |= visit_use (var);
4029 if (dump_file && (dump_flags & TDF_DETAILS))
4030 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4031 statistics_histogram_event (cfun, "SCC iterations", iterations);
4033 /* Finally, copy the contents of the no longer used optimistic
4034 table to the valid table. */
4035 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4036 copy_nary (nary, valid_info);
4037 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4038 copy_phi (phi, valid_info);
4039 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4040 ref, vn_reference_t, hir)
4041 copy_reference (ref, valid_info);
4043 current_info = valid_info;
4047 /* Pop the components of the found SCC for NAME off the SCC stack
4048 and process them. Returns true if all went well, false if
4049 we run into resource limits. */
4051 static bool
4052 extract_and_process_scc_for_name (tree name)
4054 auto_vec<tree> scc;
4055 tree x;
4057 /* Found an SCC, pop the components off the SCC stack and
4058 process them. */
4061 x = sccstack.pop ();
4063 VN_INFO (x)->on_sccstack = false;
4064 scc.safe_push (x);
4065 } while (x != name);
4067 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
4068 if (scc.length ()
4069 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4071 if (dump_file)
4072 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4073 "SCC size %u exceeding %u\n", scc.length (),
4074 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4076 return false;
4079 if (scc.length () > 1)
4080 sort_scc (scc);
4082 process_scc (scc);
4084 return true;
4087 /* Depth first search on NAME to discover and process SCC's in the SSA
4088 graph.
4089 Execution of this algorithm relies on the fact that the SCC's are
4090 popped off the stack in topological order.
4091 Returns true if successful, false if we stopped processing SCC's due
4092 to resource constraints. */
4094 static bool
4095 DFS (tree name)
4097 vec<ssa_op_iter> itervec = vNULL;
4098 vec<tree> namevec = vNULL;
4099 use_operand_p usep = NULL;
4100 gimple defstmt;
4101 tree use;
4102 ssa_op_iter iter;
4104 start_over:
4105 /* SCC info */
4106 VN_INFO (name)->dfsnum = next_dfs_num++;
4107 VN_INFO (name)->visited = true;
4108 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4110 sccstack.safe_push (name);
4111 VN_INFO (name)->on_sccstack = true;
4112 defstmt = SSA_NAME_DEF_STMT (name);
4114 /* Recursively DFS on our operands, looking for SCC's. */
4115 if (!gimple_nop_p (defstmt))
4117 /* Push a new iterator. */
4118 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4119 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4120 else
4121 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4123 else
4124 clear_and_done_ssa_iter (&iter);
4126 while (1)
4128 /* If we are done processing uses of a name, go up the stack
4129 of iterators and process SCCs as we found them. */
4130 if (op_iter_done (&iter))
4132 /* See if we found an SCC. */
4133 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4134 if (!extract_and_process_scc_for_name (name))
4136 namevec.release ();
4137 itervec.release ();
4138 return false;
4141 /* Check if we are done. */
4142 if (namevec.is_empty ())
4144 namevec.release ();
4145 itervec.release ();
4146 return true;
4149 /* Restore the last use walker and continue walking there. */
4150 use = name;
4151 name = namevec.pop ();
4152 memcpy (&iter, &itervec.last (),
4153 sizeof (ssa_op_iter));
4154 itervec.pop ();
4155 goto continue_walking;
4158 use = USE_FROM_PTR (usep);
4160 /* Since we handle phi nodes, we will sometimes get
4161 invariants in the use expression. */
4162 if (TREE_CODE (use) == SSA_NAME)
4164 if (! (VN_INFO (use)->visited))
4166 /* Recurse by pushing the current use walking state on
4167 the stack and starting over. */
4168 itervec.safe_push (iter);
4169 namevec.safe_push (name);
4170 name = use;
4171 goto start_over;
4173 continue_walking:
4174 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4175 VN_INFO (use)->low);
4177 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4178 && VN_INFO (use)->on_sccstack)
4180 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4181 VN_INFO (name)->low);
4185 usep = op_iter_next_use (&iter);
4189 /* Allocate a value number table. */
4191 static void
4192 allocate_vn_table (vn_tables_t table)
4194 table->phis = new vn_phi_table_type (23);
4195 table->nary = new vn_nary_op_table_type (23);
4196 table->references = new vn_reference_table_type (23);
4198 gcc_obstack_init (&table->nary_obstack);
4199 table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
4200 table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
4201 30);
4204 /* Free a value number table. */
4206 static void
4207 free_vn_table (vn_tables_t table)
4209 delete table->phis;
4210 table->phis = NULL;
4211 delete table->nary;
4212 table->nary = NULL;
4213 delete table->references;
4214 table->references = NULL;
4215 obstack_free (&table->nary_obstack, NULL);
4216 delete table->phis_pool;
4217 delete table->references_pool;
4220 static void
4221 init_scc_vn (void)
4223 size_t i;
4224 int j;
4225 int *rpo_numbers_temp;
4227 calculate_dominance_info (CDI_DOMINATORS);
4228 sccstack.create (0);
4229 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4231 constant_value_ids = BITMAP_ALLOC (NULL);
4233 next_dfs_num = 1;
4234 next_value_id = 1;
4236 vn_ssa_aux_table.create (num_ssa_names + 1);
4237 /* VEC_alloc doesn't actually grow it to the right size, it just
4238 preallocates the space to do so. */
4239 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4240 gcc_obstack_init (&vn_ssa_aux_obstack);
4242 shared_lookup_phiargs.create (0);
4243 shared_lookup_references.create (0);
4244 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4245 rpo_numbers_temp =
4246 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4247 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4249 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4250 the i'th block in RPO order is bb. We want to map bb's to RPO
4251 numbers, so we need to rearrange this array. */
4252 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4253 rpo_numbers[rpo_numbers_temp[j]] = j;
4255 XDELETE (rpo_numbers_temp);
4257 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4259 /* Create the VN_INFO structures, and initialize value numbers to
4260 TOP. */
4261 for (i = 0; i < num_ssa_names; i++)
4263 tree name = ssa_name (i);
4264 if (name)
4266 VN_INFO_GET (name)->valnum = VN_TOP;
4267 VN_INFO (name)->expr = NULL_TREE;
4268 VN_INFO (name)->value_id = 0;
4272 renumber_gimple_stmt_uids ();
4274 /* Create the valid and optimistic value numbering tables. */
4275 valid_info = XCNEW (struct vn_tables_s);
4276 allocate_vn_table (valid_info);
4277 optimistic_info = XCNEW (struct vn_tables_s);
4278 allocate_vn_table (optimistic_info);
4281 void
4282 free_scc_vn (void)
4284 size_t i;
4286 delete constant_to_value_id;
4287 constant_to_value_id = NULL;
4288 BITMAP_FREE (constant_value_ids);
4289 shared_lookup_phiargs.release ();
4290 shared_lookup_references.release ();
4291 XDELETEVEC (rpo_numbers);
4293 for (i = 0; i < num_ssa_names; i++)
4295 tree name = ssa_name (i);
4296 if (name
4297 && VN_INFO (name)->needs_insertion)
4298 release_ssa_name (name);
4300 obstack_free (&vn_ssa_aux_obstack, NULL);
4301 vn_ssa_aux_table.release ();
4303 sccstack.release ();
4304 free_vn_table (valid_info);
4305 XDELETE (valid_info);
4306 free_vn_table (optimistic_info);
4307 XDELETE (optimistic_info);
4310 /* Set *ID according to RESULT. */
4312 static void
4313 set_value_id_for_result (tree result, unsigned int *id)
4315 if (result && TREE_CODE (result) == SSA_NAME)
4316 *id = VN_INFO (result)->value_id;
4317 else if (result && is_gimple_min_invariant (result))
4318 *id = get_or_alloc_constant_value_id (result);
4319 else
4320 *id = get_next_value_id ();
4323 /* Set the value ids in the valid hash tables. */
4325 static void
4326 set_hashtable_value_ids (void)
4328 vn_nary_op_iterator_type hin;
4329 vn_phi_iterator_type hip;
4330 vn_reference_iterator_type hir;
4331 vn_nary_op_t vno;
4332 vn_reference_t vr;
4333 vn_phi_t vp;
4335 /* Now set the value ids of the things we had put in the hash
4336 table. */
4338 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4339 set_value_id_for_result (vno->result, &vno->value_id);
4341 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4342 set_value_id_for_result (vp->result, &vp->value_id);
4344 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4345 hir)
4346 set_value_id_for_result (vr->result, &vr->value_id);
4349 class cond_dom_walker : public dom_walker
4351 public:
4352 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4354 virtual void before_dom_children (basic_block);
4356 bool fail;
4359 void
4360 cond_dom_walker::before_dom_children (basic_block bb)
4362 edge e;
4363 edge_iterator ei;
4365 if (fail)
4366 return;
4368 /* If any of the predecessor edges that do not come from blocks dominated
4369 by us are still marked as possibly executable consider this block
4370 reachable. */
4371 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4372 FOR_EACH_EDGE (e, ei, bb->preds)
4373 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4374 reachable |= (e->flags & EDGE_EXECUTABLE);
4376 /* If the block is not reachable all outgoing edges are not
4377 executable. */
4378 if (!reachable)
4380 if (dump_file && (dump_flags & TDF_DETAILS))
4381 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4382 "BB %d as not executable\n", bb->index);
4384 FOR_EACH_EDGE (e, ei, bb->succs)
4385 e->flags &= ~EDGE_EXECUTABLE;
4386 return;
4389 gimple stmt = last_stmt (bb);
4390 if (!stmt)
4391 return;
4393 enum gimple_code code = gimple_code (stmt);
4394 if (code != GIMPLE_COND
4395 && code != GIMPLE_SWITCH
4396 && code != GIMPLE_GOTO)
4397 return;
4399 if (dump_file && (dump_flags & TDF_DETAILS))
4401 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4402 bb->index);
4403 print_gimple_stmt (dump_file, stmt, 0, 0);
4406 /* Value-number the last stmts SSA uses. */
4407 ssa_op_iter i;
4408 tree op;
4409 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4410 if (VN_INFO (op)->visited == false
4411 && !DFS (op))
4413 fail = true;
4414 return;
4417 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4418 if value-numbering can prove they are not reachable. Handling
4419 computed gotos is also possible. */
4420 tree val;
4421 switch (code)
4423 case GIMPLE_COND:
4425 tree lhs = gimple_cond_lhs (stmt);
4426 tree rhs = gimple_cond_rhs (stmt);
4427 /* Work hard in computing the condition and take into account
4428 the valueization of the defining stmt. */
4429 if (TREE_CODE (lhs) == SSA_NAME)
4430 lhs = vn_get_expr_for (lhs);
4431 if (TREE_CODE (rhs) == SSA_NAME)
4432 rhs = vn_get_expr_for (rhs);
4433 val = fold_binary (gimple_cond_code (stmt),
4434 boolean_type_node, lhs, rhs);
4435 break;
4437 case GIMPLE_SWITCH:
4438 val = gimple_switch_index (as_a <gswitch *> (stmt));
4439 break;
4440 case GIMPLE_GOTO:
4441 val = gimple_goto_dest (stmt);
4442 break;
4443 default:
4444 gcc_unreachable ();
4446 if (!val)
4447 return;
4449 edge taken = find_taken_edge (bb, vn_valueize (val));
4450 if (!taken)
4451 return;
4453 if (dump_file && (dump_flags & TDF_DETAILS))
4454 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4455 "not executable\n", bb->index, bb->index, taken->dest->index);
4457 FOR_EACH_EDGE (e, ei, bb->succs)
4458 if (e != taken)
4459 e->flags &= ~EDGE_EXECUTABLE;
4462 /* Do SCCVN. Returns true if it finished, false if we bailed out
4463 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4464 how we use the alias oracle walking during the VN process. */
4466 bool
4467 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4469 basic_block bb;
4470 size_t i;
4471 tree param;
4473 default_vn_walk_kind = default_vn_walk_kind_;
4475 init_scc_vn ();
4476 current_info = valid_info;
4478 for (param = DECL_ARGUMENTS (current_function_decl);
4479 param;
4480 param = DECL_CHAIN (param))
4482 tree def = ssa_default_def (cfun, param);
4483 if (def)
4485 VN_INFO (def)->visited = true;
4486 VN_INFO (def)->valnum = def;
4490 /* Mark all edges as possibly executable. */
4491 FOR_ALL_BB_FN (bb, cfun)
4493 edge_iterator ei;
4494 edge e;
4495 FOR_EACH_EDGE (e, ei, bb->succs)
4496 e->flags |= EDGE_EXECUTABLE;
4499 /* Walk all blocks in dominator order, value-numbering the last stmts
4500 SSA uses and decide whether outgoing edges are not executable. */
4501 cond_dom_walker walker;
4502 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4503 if (walker.fail)
4505 free_scc_vn ();
4506 return false;
4509 /* Value-number remaining SSA names. */
4510 for (i = 1; i < num_ssa_names; ++i)
4512 tree name = ssa_name (i);
4513 if (name
4514 && VN_INFO (name)->visited == false
4515 && !has_zero_uses (name))
4516 if (!DFS (name))
4518 free_scc_vn ();
4519 return false;
4523 /* Initialize the value ids. */
4525 for (i = 1; i < num_ssa_names; ++i)
4527 tree name = ssa_name (i);
4528 vn_ssa_aux_t info;
4529 if (!name)
4530 continue;
4531 info = VN_INFO (name);
4532 if (info->valnum == name
4533 || info->valnum == VN_TOP)
4534 info->value_id = get_next_value_id ();
4535 else if (is_gimple_min_invariant (info->valnum))
4536 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4539 /* Propagate. */
4540 for (i = 1; i < num_ssa_names; ++i)
4542 tree name = ssa_name (i);
4543 vn_ssa_aux_t info;
4544 if (!name)
4545 continue;
4546 info = VN_INFO (name);
4547 if (TREE_CODE (info->valnum) == SSA_NAME
4548 && info->valnum != name
4549 && info->value_id != VN_INFO (info->valnum)->value_id)
4550 info->value_id = VN_INFO (info->valnum)->value_id;
4553 set_hashtable_value_ids ();
4555 if (dump_file && (dump_flags & TDF_DETAILS))
4557 fprintf (dump_file, "Value numbers:\n");
4558 for (i = 0; i < num_ssa_names; i++)
4560 tree name = ssa_name (i);
4561 if (name
4562 && VN_INFO (name)->visited
4563 && SSA_VAL (name) != name)
4565 print_generic_expr (dump_file, name, 0);
4566 fprintf (dump_file, " = ");
4567 print_generic_expr (dump_file, SSA_VAL (name), 0);
4568 fprintf (dump_file, "\n");
4573 return true;
4576 /* Return the maximum value id we have ever seen. */
4578 unsigned int
4579 get_max_value_id (void)
4581 return next_value_id;
4584 /* Return the next unique value id. */
4586 unsigned int
4587 get_next_value_id (void)
4589 return next_value_id++;
4593 /* Compare two expressions E1 and E2 and return true if they are equal. */
4595 bool
4596 expressions_equal_p (tree e1, tree e2)
4598 /* The obvious case. */
4599 if (e1 == e2)
4600 return true;
4602 /* If only one of them is null, they cannot be equal. */
4603 if (!e1 || !e2)
4604 return false;
4606 /* Now perform the actual comparison. */
4607 if (TREE_CODE (e1) == TREE_CODE (e2)
4608 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4609 return true;
4611 return false;
4615 /* Return true if the nary operation NARY may trap. This is a copy
4616 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4618 bool
4619 vn_nary_may_trap (vn_nary_op_t nary)
4621 tree type;
4622 tree rhs2 = NULL_TREE;
4623 bool honor_nans = false;
4624 bool honor_snans = false;
4625 bool fp_operation = false;
4626 bool honor_trapv = false;
4627 bool handled, ret;
4628 unsigned i;
4630 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4631 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4632 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4634 type = nary->type;
4635 fp_operation = FLOAT_TYPE_P (type);
4636 if (fp_operation)
4638 honor_nans = flag_trapping_math && !flag_finite_math_only;
4639 honor_snans = flag_signaling_nans != 0;
4641 else if (INTEGRAL_TYPE_P (type)
4642 && TYPE_OVERFLOW_TRAPS (type))
4643 honor_trapv = true;
4645 if (nary->length >= 2)
4646 rhs2 = nary->op[1];
4647 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4648 honor_trapv,
4649 honor_nans, honor_snans, rhs2,
4650 &handled);
4651 if (handled
4652 && ret)
4653 return true;
4655 for (i = 0; i < nary->length; ++i)
4656 if (tree_could_trap_p (nary->op[i]))
4657 return true;
4659 return false;