gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight line code, it is equivalent to a regular hash based value
67 numbering that is performed in reverse postorder.
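   (As a small made-up illustration, not drawn from any particular
   testcase: in the straight-line GIMPLE

     a_1 = b_2 + 1;
     c_3 = b_2 + 1;
     d_4 = a_1;

   a_1 and c_3 hash to the same expression once their operands are
   valueized and therefore get the same value number, while d_4 is
   valueized directly to a_1, so elimination may later replace uses of
   c_3 and d_4 with a_1.)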
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
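   (A made-up sketch with illustrative SSA names only: the VDEFs of

     # .MEM_3 = VDEF <.MEM_1>
     a.x = i_2;

   on one path and of an identical store

     # .MEM_4 = VDEF <.MEM_1>
     a.x = i_2;

   on another path get the same value number, because the address, the
   stored value and the incoming VUSE .MEM_1 all match.)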
109 TODO:
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
335 do
336 {
337 x = SSA_VAL (x);
338 }
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist returns 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field,
776 a matching type is not necessary and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + wi::lrshift (wi::to_offset (bit_offset),
792 LOG2_BITS_PER_UNIT));
793 if (wi::fits_shwi_p (off)
794 /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
798 trick here). */
799 && (TREE_CODE (orig) != ADDR_EXPR
800 || off != 0
801 || cfun->after_inlining))
802 temp.off = off.to_shwi ();
806 break;
807 case ARRAY_RANGE_REF:
808 case ARRAY_REF:
809 /* Record index as operand. */
810 temp.op0 = TREE_OPERAND (ref, 1);
811 /* Always record lower bounds and element size. */
812 temp.op1 = array_ref_low_bound (ref);
813 temp.op2 = array_ref_element_size (ref);
814 if (TREE_CODE (temp.op0) == INTEGER_CST
815 && TREE_CODE (temp.op1) == INTEGER_CST
816 && TREE_CODE (temp.op2) == INTEGER_CST)
818 offset_int off = ((wi::to_offset (temp.op0)
819 - wi::to_offset (temp.op1))
820 * wi::to_offset (temp.op2));
821 if (wi::fits_shwi_p (off))
822 temp.off = off.to_shwi();
824 break;
825 case VAR_DECL:
826 if (DECL_HARD_REGISTER (ref))
828 temp.op0 = ref;
829 break;
831 /* Fallthru. */
832 case PARM_DECL:
833 case CONST_DECL:
834 case RESULT_DECL:
835 /* Canonicalize decls to MEM[&decl] which is what we end up with
836 when valueizing MEM[ptr] with ptr = &decl. */
837 temp.opcode = MEM_REF;
838 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
839 temp.off = 0;
840 result->safe_push (temp);
841 temp.opcode = ADDR_EXPR;
842 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
843 temp.type = TREE_TYPE (temp.op0);
844 temp.off = -1;
845 break;
846 case STRING_CST:
847 case INTEGER_CST:
848 case COMPLEX_CST:
849 case VECTOR_CST:
850 case REAL_CST:
851 case FIXED_CST:
852 case CONSTRUCTOR:
853 case SSA_NAME:
854 temp.op0 = ref;
855 break;
856 case ADDR_EXPR:
857 if (is_gimple_min_invariant (ref))
859 temp.op0 = ref;
860 break;
862 break;
863 /* These are only interesting for their operands, their
864 existence, and their type. They will never be the last
865 ref in the chain of references (i.e. they require an
866 operand), so we don't have to put anything
867 for op* as it will be handled by the iteration. */
868 case REALPART_EXPR:
869 temp.off = 0;
870 break;
871 case VIEW_CONVERT_EXPR:
872 temp.off = 0;
873 temp.reverse = storage_order_barrier_p (ref);
874 break;
875 case IMAGPART_EXPR:
876 /* This is only interesting for its constant offset. */
877 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
878 break;
879 default:
880 gcc_unreachable ();
882 result->safe_push (temp);
884 if (REFERENCE_CLASS_P (ref)
885 || TREE_CODE (ref) == MODIFY_EXPR
886 || TREE_CODE (ref) == WITH_SIZE_EXPR
887 || (TREE_CODE (ref) == ADDR_EXPR
888 && !is_gimple_min_invariant (ref)))
889 ref = TREE_OPERAND (ref, 0);
890 else
891 ref = NULL_TREE;
895 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
896 operands in *OPS, the reference alias set SET and the reference type TYPE.
897 Return true if something useful was produced. */
899 bool
900 ao_ref_init_from_vn_reference (ao_ref *ref,
901 alias_set_type set, tree type,
902 vec<vn_reference_op_s> ops)
904 vn_reference_op_t op;
905 unsigned i;
906 tree base = NULL_TREE;
907 tree *op0_p = &base;
908 offset_int offset = 0;
909 offset_int max_size;
910 offset_int size = -1;
911 tree size_tree = NULL_TREE;
912 alias_set_type base_alias_set = -1;
914 /* First get the final access size from just the outermost expression. */
915 op = &ops[0];
916 if (op->opcode == COMPONENT_REF)
917 size_tree = DECL_SIZE (op->op0);
918 else if (op->opcode == BIT_FIELD_REF)
919 size_tree = op->op0;
920 else
922 machine_mode mode = TYPE_MODE (type);
923 if (mode == BLKmode)
924 size_tree = TYPE_SIZE (type);
925 else
926 size = int (GET_MODE_BITSIZE (mode));
928 if (size_tree != NULL_TREE
929 && TREE_CODE (size_tree) == INTEGER_CST)
930 size = wi::to_offset (size_tree);
932 /* Initially, maxsize is the same as the accessed element size.
933 In the following it will only grow (or become -1). */
934 max_size = size;
936 /* Compute cumulative bit-offset for nested component-refs and array-refs,
937 and find the ultimate containing object. */
938 FOR_EACH_VEC_ELT (ops, i, op)
940 switch (op->opcode)
942 /* These may be in the reference ops, but we cannot do anything
943 sensible with them here. */
944 case ADDR_EXPR:
945 /* Apart from ADDR_EXPR arguments to MEM_REF. */
946 if (base != NULL_TREE
947 && TREE_CODE (base) == MEM_REF
948 && op->op0
949 && DECL_P (TREE_OPERAND (op->op0, 0)))
951 vn_reference_op_t pop = &ops[i-1];
952 base = TREE_OPERAND (op->op0, 0);
953 if (pop->off == -1)
955 max_size = -1;
956 offset = 0;
958 else
959 offset += pop->off * BITS_PER_UNIT;
960 op0_p = NULL;
961 break;
963 /* Fallthru. */
964 case CALL_EXPR:
965 return false;
967 /* Record the base objects. */
968 case MEM_REF:
969 base_alias_set = get_deref_alias_set (op->op0);
970 *op0_p = build2 (MEM_REF, op->type,
971 NULL_TREE, op->op0);
972 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
973 MR_DEPENDENCE_BASE (*op0_p) = op->base;
974 op0_p = &TREE_OPERAND (*op0_p, 0);
975 break;
977 case VAR_DECL:
978 case PARM_DECL:
979 case RESULT_DECL:
980 case SSA_NAME:
981 *op0_p = op->op0;
982 op0_p = NULL;
983 break;
985 /* And now the usual component-reference style ops. */
986 case BIT_FIELD_REF:
987 offset += wi::to_offset (op->op1);
988 break;
990 case COMPONENT_REF:
992 tree field = op->op0;
993 /* We do not have a complete COMPONENT_REF tree here so we
994 cannot use component_ref_field_offset. Do the interesting
995 parts manually. */
996 tree this_offset = DECL_FIELD_OFFSET (field);
998 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
999 max_size = -1;
1000 else
1002 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
1003 LOG2_BITS_PER_UNIT);
1004 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1005 offset += woffset;
1007 break;
1010 case ARRAY_RANGE_REF:
1011 case ARRAY_REF:
1012 /* We recorded the lower bound and the element size. */
1013 if (TREE_CODE (op->op0) != INTEGER_CST
1014 || TREE_CODE (op->op1) != INTEGER_CST
1015 || TREE_CODE (op->op2) != INTEGER_CST)
1016 max_size = -1;
1017 else
1019 offset_int woffset
1020 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1021 TYPE_PRECISION (TREE_TYPE (op->op0)));
1022 woffset *= wi::to_offset (op->op2);
1023 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1024 offset += woffset;
1026 break;
1028 case REALPART_EXPR:
1029 break;
1031 case IMAGPART_EXPR:
1032 offset += size;
1033 break;
1035 case VIEW_CONVERT_EXPR:
1036 break;
1038 case STRING_CST:
1039 case INTEGER_CST:
1040 case COMPLEX_CST:
1041 case VECTOR_CST:
1042 case REAL_CST:
1043 case CONSTRUCTOR:
1044 case CONST_DECL:
1045 return false;
1047 default:
1048 return false;
1052 if (base == NULL_TREE)
1053 return false;
1055 ref->ref = NULL_TREE;
1056 ref->base = base;
1057 ref->ref_alias_set = set;
1058 if (base_alias_set != -1)
1059 ref->base_alias_set = base_alias_set;
1060 else
1061 ref->base_alias_set = get_alias_set (base);
1062 /* We discount volatiles from value-numbering elsewhere. */
1063 ref->volatile_p = false;
1065 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1067 ref->offset = 0;
1068 ref->size = -1;
1069 ref->max_size = -1;
1070 return true;
1073 ref->size = size.to_shwi ();
1075 if (!wi::fits_shwi_p (offset))
1077 ref->offset = 0;
1078 ref->max_size = -1;
1079 return true;
1082 ref->offset = offset.to_shwi ();
1084 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1085 ref->max_size = -1;
1086 else
1087 ref->max_size = max_size.to_shwi ();
1089 return true;
1092 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1093 vn_reference_op_s's. */
1095 static void
1096 copy_reference_ops_from_call (gcall *call,
1097 vec<vn_reference_op_s> *result)
1099 vn_reference_op_s temp;
1100 unsigned i;
1101 tree lhs = gimple_call_lhs (call);
1102 int lr;
1104 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1105 different. By adding the lhs here in the vector, we ensure that the
1106 hashcode is different, guaranteeing a different value number. */
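  /* For example (a made-up sketch), "x.f = foo ();" and "y.f = foo ();"
     could otherwise valueize identically; recording the distinct LHS
     trees here keeps their hashes, and thus their VDEF value numbers,
     apart.  */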
1107 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1109 memset (&temp, 0, sizeof (temp));
1110 temp.opcode = MODIFY_EXPR;
1111 temp.type = TREE_TYPE (lhs);
1112 temp.op0 = lhs;
1113 temp.off = -1;
1114 result->safe_push (temp);
1117 /* Copy the type, opcode, function, static chain and EH region, if any. */
1118 memset (&temp, 0, sizeof (temp));
1119 temp.type = gimple_call_return_type (call);
1120 temp.opcode = CALL_EXPR;
1121 temp.op0 = gimple_call_fn (call);
1122 temp.op1 = gimple_call_chain (call);
1123 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1124 temp.op2 = size_int (lr);
1125 temp.off = -1;
1126 if (gimple_call_with_bounds_p (call))
1127 temp.with_bounds = 1;
1128 result->safe_push (temp);
1130 /* Copy the call arguments. As they can be references as well,
1131 just chain them together. */
1132 for (i = 0; i < gimple_call_num_args (call); ++i)
1134 tree callarg = gimple_call_arg (call, i);
1135 copy_reference_ops_from_ref (callarg, result);
1139 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1140 *I_P to point to the last element of the replacement. */
1141 static bool
1142 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1143 unsigned int *i_p)
1145 unsigned int i = *i_p;
1146 vn_reference_op_t op = &(*ops)[i];
1147 vn_reference_op_t mem_op = &(*ops)[i - 1];
1148 tree addr_base;
1149 HOST_WIDE_INT addr_offset = 0;
1151 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1152 from .foo.bar to the preceding MEM_REF offset and replace the
1153 address with &OBJ. */
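  /* E.g. (illustrative only): an ADDR_EXPR &s.a.b feeding MEM [..., 8]
     becomes &s feeding MEM [..., 8 + unit offset of .a.b within s].  */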
1154 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1155 &addr_offset);
1156 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1157 if (addr_base != TREE_OPERAND (op->op0, 0))
1159 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1160 off += addr_offset;
1161 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1162 op->op0 = build_fold_addr_expr (addr_base);
1163 if (tree_fits_shwi_p (mem_op->op0))
1164 mem_op->off = tree_to_shwi (mem_op->op0);
1165 else
1166 mem_op->off = -1;
1167 return true;
1169 return false;
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1174 static bool
1175 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1176 unsigned int *i_p)
1178 unsigned int i = *i_p;
1179 vn_reference_op_t op = &(*ops)[i];
1180 vn_reference_op_t mem_op = &(*ops)[i - 1];
1181 gimple *def_stmt;
1182 enum tree_code code;
1183 offset_int off;
1185 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1186 if (!is_gimple_assign (def_stmt))
1187 return false;
1189 code = gimple_assign_rhs_code (def_stmt);
1190 if (code != ADDR_EXPR
1191 && code != POINTER_PLUS_EXPR)
1192 return false;
1194 off = offset_int::from (mem_op->op0, SIGNED);
1196 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1197 from .foo.bar to the preceding MEM_REF offset and replace the
1198 address with &OBJ. */
1199 if (code == ADDR_EXPR)
1201 tree addr, addr_base;
1202 HOST_WIDE_INT addr_offset;
1204 addr = gimple_assign_rhs1 (def_stmt);
1205 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1206 &addr_offset);
1207 /* If that didn't work because the address isn't invariant propagate
1208 the reference tree from the address operation in case the current
1209 dereference isn't offsetted. */
1210 if (!addr_base
1211 && *i_p == ops->length () - 1
1212 && off == 0
1213 /* This makes us disable this transform for PRE where the
1214 reference ops might also be used for code insertion which
1215 is invalid. */
1216 && default_vn_walk_kind == VN_WALKREWRITE)
1218 auto_vec<vn_reference_op_s, 32> tem;
1219 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1220 ops->pop ();
1221 ops->pop ();
1222 ops->safe_splice (tem);
1223 --*i_p;
1224 return true;
1226 if (!addr_base
1227 || TREE_CODE (addr_base) != MEM_REF)
1228 return false;
1230 off += addr_offset;
1231 off += mem_ref_offset (addr_base);
1232 op->op0 = TREE_OPERAND (addr_base, 0);
1234 else
1236 tree ptr, ptroff;
1237 ptr = gimple_assign_rhs1 (def_stmt);
1238 ptroff = gimple_assign_rhs2 (def_stmt);
1239 if (TREE_CODE (ptr) != SSA_NAME
1240 || TREE_CODE (ptroff) != INTEGER_CST)
1241 return false;
1243 off += wi::to_offset (ptroff);
1244 op->op0 = ptr;
1247 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1248 if (tree_fits_shwi_p (mem_op->op0))
1249 mem_op->off = tree_to_shwi (mem_op->op0);
1250 else
1251 mem_op->off = -1;
1252 if (TREE_CODE (op->op0) == SSA_NAME)
1253 op->op0 = SSA_VAL (op->op0);
1254 if (TREE_CODE (op->op0) != SSA_NAME)
1255 op->opcode = TREE_CODE (op->op0);
1257 /* And recurse. */
1258 if (TREE_CODE (op->op0) == SSA_NAME)
1259 vn_reference_maybe_forwprop_address (ops, i_p);
1260 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1261 vn_reference_fold_indirect (ops, i_p);
1262 return true;
1265 /* Optimize the reference REF to a constant if possible or return
1266 NULL_TREE if not. */
1268 tree
1269 fully_constant_vn_reference_p (vn_reference_t ref)
1271 vec<vn_reference_op_s> operands = ref->operands;
1272 vn_reference_op_t op;
1274 /* Try to simplify the translated expression if it is
1275 a call to a builtin function with at most two arguments. */
1276 op = &operands[0];
1277 if (op->opcode == CALL_EXPR
1278 && TREE_CODE (op->op0) == ADDR_EXPR
1279 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1280 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1281 && operands.length () >= 2
1282 && operands.length () <= 3)
1284 vn_reference_op_t arg0, arg1 = NULL;
1285 bool anyconst = false;
1286 arg0 = &operands[1];
1287 if (operands.length () > 2)
1288 arg1 = &operands[2];
1289 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1290 || (arg0->opcode == ADDR_EXPR
1291 && is_gimple_min_invariant (arg0->op0)))
1292 anyconst = true;
1293 if (arg1
1294 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1295 || (arg1->opcode == ADDR_EXPR
1296 && is_gimple_min_invariant (arg1->op0))))
1297 anyconst = true;
1298 if (anyconst)
1300 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1301 arg1 ? 2 : 1,
1302 arg0->op0,
1303 arg1 ? arg1->op0 : NULL);
1304 if (folded
1305 && TREE_CODE (folded) == NOP_EXPR)
1306 folded = TREE_OPERAND (folded, 0);
1307 if (folded
1308 && is_gimple_min_invariant (folded))
1309 return folded;
1313 /* Simplify reads from constants or constant initializers. */
1314 else if (BITS_PER_UNIT == 8
1315 && is_gimple_reg_type (ref->type)
1316 && (!INTEGRAL_TYPE_P (ref->type)
1317 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1319 HOST_WIDE_INT off = 0;
1320 HOST_WIDE_INT size;
1321 if (INTEGRAL_TYPE_P (ref->type))
1322 size = TYPE_PRECISION (ref->type);
1323 else
1324 size = tree_to_shwi (TYPE_SIZE (ref->type));
1325 if (size % BITS_PER_UNIT != 0
1326 || size > MAX_BITSIZE_MODE_ANY_MODE)
1327 return NULL_TREE;
1328 size /= BITS_PER_UNIT;
1329 unsigned i;
1330 for (i = 0; i < operands.length (); ++i)
1332 if (operands[i].off == -1)
1333 return NULL_TREE;
1334 off += operands[i].off;
1335 if (operands[i].opcode == MEM_REF)
1337 ++i;
1338 break;
1341 vn_reference_op_t base = &operands[--i];
1342 tree ctor = error_mark_node;
1343 tree decl = NULL_TREE;
1344 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1345 ctor = base->op0;
1346 else if (base->opcode == MEM_REF
1347 && base[1].opcode == ADDR_EXPR
1348 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1349 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1351 decl = TREE_OPERAND (base[1].op0, 0);
1352 ctor = ctor_for_folding (decl);
1354 if (ctor == NULL_TREE)
1355 return build_zero_cst (ref->type);
1356 else if (ctor != error_mark_node)
1358 if (decl)
1360 tree res = fold_ctor_reference (ref->type, ctor,
1361 off * BITS_PER_UNIT,
1362 size * BITS_PER_UNIT, decl);
1363 if (res)
1365 STRIP_USELESS_TYPE_CONVERSION (res);
1366 if (is_gimple_min_invariant (res))
1367 return res;
1370 else
1372 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1373 int len = native_encode_expr (ctor, buf, size, off);
1374 if (len > 0)
1375 return native_interpret_expr (ref->type, buf, len);
1380 return NULL_TREE;
1383 /* Return true if OPS contain a storage order barrier. */
1385 static bool
1386 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1388 vn_reference_op_t op;
1389 unsigned i;
1391 FOR_EACH_VEC_ELT (ops, i, op)
1392 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1393 return true;
1395 return false;
1398 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1399 structures into their value numbers. This is done in-place, and
1400 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1401 whether any operands were valueized. */
1403 static vec<vn_reference_op_s>
1404 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1406 vn_reference_op_t vro;
1407 unsigned int i;
1409 *valueized_anything = false;
1411 FOR_EACH_VEC_ELT (orig, i, vro)
1413 if (vro->opcode == SSA_NAME
1414 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1416 tree tem = SSA_VAL (vro->op0);
1417 if (tem != vro->op0)
1419 *valueized_anything = true;
1420 vro->op0 = tem;
1422 /* If it transforms from an SSA_NAME to a constant, update
1423 the opcode. */
1424 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1425 vro->opcode = TREE_CODE (vro->op0);
1427 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1429 tree tem = SSA_VAL (vro->op1);
1430 if (tem != vro->op1)
1432 *valueized_anything = true;
1433 vro->op1 = tem;
1436 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1438 tree tem = SSA_VAL (vro->op2);
1439 if (tem != vro->op2)
1441 *valueized_anything = true;
1442 vro->op2 = tem;
1445 /* If it transforms from an SSA_NAME to an address, fold with
1446 a preceding indirect reference. */
1447 if (i > 0
1448 && vro->op0
1449 && TREE_CODE (vro->op0) == ADDR_EXPR
1450 && orig[i - 1].opcode == MEM_REF)
1452 if (vn_reference_fold_indirect (&orig, &i))
1453 *valueized_anything = true;
1455 else if (i > 0
1456 && vro->opcode == SSA_NAME
1457 && orig[i - 1].opcode == MEM_REF)
1459 if (vn_reference_maybe_forwprop_address (&orig, &i))
1460 *valueized_anything = true;
1462 /* If it transforms a non-constant ARRAY_REF into a constant
1463 one, adjust the constant offset. */
1464 else if (vro->opcode == ARRAY_REF
1465 && vro->off == -1
1466 && TREE_CODE (vro->op0) == INTEGER_CST
1467 && TREE_CODE (vro->op1) == INTEGER_CST
1468 && TREE_CODE (vro->op2) == INTEGER_CST)
1470 offset_int off = ((wi::to_offset (vro->op0)
1471 - wi::to_offset (vro->op1))
1472 * wi::to_offset (vro->op2));
1473 if (wi::fits_shwi_p (off))
1474 vro->off = off.to_shwi ();
1478 return orig;
1481 static vec<vn_reference_op_s>
1482 valueize_refs (vec<vn_reference_op_s> orig)
1484 bool tem;
1485 return valueize_refs_1 (orig, &tem);
1488 static vec<vn_reference_op_s> shared_lookup_references;
1490 /* Create a vector of vn_reference_op_s structures from REF, a
1491 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1492 this function. *VALUEIZED_ANYTHING will specify whether any
1493 operands were valueized. */
1495 static vec<vn_reference_op_s>
1496 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1498 if (!ref)
1499 return vNULL;
1500 shared_lookup_references.truncate (0);
1501 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1502 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1503 valueized_anything);
1504 return shared_lookup_references;
1507 /* Create a vector of vn_reference_op_s structures from CALL, a
1508 call statement. The vector is shared among all callers of
1509 this function. */
1511 static vec<vn_reference_op_s>
1512 valueize_shared_reference_ops_from_call (gcall *call)
1514 if (!call)
1515 return vNULL;
1516 shared_lookup_references.truncate (0);
1517 copy_reference_ops_from_call (call, &shared_lookup_references);
1518 shared_lookup_references = valueize_refs (shared_lookup_references);
1519 return shared_lookup_references;
1522 /* Lookup a SCCVN reference operation VR in the current hash table.
1523 Returns the resulting value number if it exists in the hash table,
1524 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1525 vn_reference_t stored in the hashtable if something is found. */
1527 static tree
1528 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1530 vn_reference_s **slot;
1531 hashval_t hash;
1533 hash = vr->hashcode;
1534 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1535 if (!slot && current_info == optimistic_info)
1536 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1537 if (slot)
1539 if (vnresult)
1540 *vnresult = (vn_reference_t)*slot;
1541 return ((vn_reference_t)*slot)->result;
1544 return NULL_TREE;
1547 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1548 with the current VUSE and performs the expression lookup. */
1550 static void *
1551 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1552 unsigned int cnt, void *vr_)
1554 vn_reference_t vr = (vn_reference_t)vr_;
1555 vn_reference_s **slot;
1556 hashval_t hash;
1558 /* This bounds the stmt walks we perform on reference lookups
1559 to O(1) instead of O(N) where N is the number of dominating
1560 stores. */
1561 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1562 return (void *)-1;
1564 if (last_vuse_ptr)
1565 *last_vuse_ptr = vuse;
1567 /* Fixup vuse and hash. */
1568 if (vr->vuse)
1569 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1570 vr->vuse = vuse_ssa_val (vuse);
1571 if (vr->vuse)
1572 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1574 hash = vr->hashcode;
1575 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1576 if (!slot && current_info == optimistic_info)
1577 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1578 if (slot)
1579 return *slot;
1581 return NULL;
1584 /* Lookup an existing or insert a new vn_reference entry into the
1585 value table for the VUSE, SET, TYPE, OPERANDS reference which
1586 has the value VALUE which is either a constant or an SSA name. */
1588 static vn_reference_t
1589 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1590 alias_set_type set,
1591 tree type,
1592 vec<vn_reference_op_s,
1593 va_heap> operands,
1594 tree value)
1596 vn_reference_s vr1;
1597 vn_reference_t result;
1598 unsigned value_id;
1599 vr1.vuse = vuse;
1600 vr1.operands = operands;
1601 vr1.type = type;
1602 vr1.set = set;
1603 vr1.hashcode = vn_reference_compute_hash (&vr1);
1604 if (vn_reference_lookup_1 (&vr1, &result))
1605 return result;
1606 if (TREE_CODE (value) == SSA_NAME)
1607 value_id = VN_INFO (value)->value_id;
1608 else
1609 value_id = get_or_alloc_constant_value_id (value);
1610 return vn_reference_insert_pieces (vuse, set, type,
1611 operands.copy (), value, value_id);
1614 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1615 from the statement defining VUSE and if not successful tries to
1616 translate *REFP and VR_ through an aggregate copy at the definition
1617 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1618 of *REF and *VR. If only disambiguation was performed then
1619 *DISAMBIGUATE_ONLY is set to true. */
1621 static void *
1622 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1623 bool *disambiguate_only)
1625 vn_reference_t vr = (vn_reference_t)vr_;
1626 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1627 tree base = ao_ref_base (ref);
1628 HOST_WIDE_INT offset, maxsize;
1629 static vec<vn_reference_op_s>
1630 lhs_ops = vNULL;
1631 ao_ref lhs_ref;
1632 bool lhs_ref_ok = false;
1634 /* If the reference is based on a parameter that was determined as
1635 pointing to readonly memory it doesn't change. */
1636 if (TREE_CODE (base) == MEM_REF
1637 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1638 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1639 && bitmap_bit_p (const_parms,
1640 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1642 *disambiguate_only = true;
1643 return NULL;
1646 /* First try to disambiguate after value-replacing in the definitions LHS. */
1647 if (is_gimple_assign (def_stmt))
1649 tree lhs = gimple_assign_lhs (def_stmt);
1650 bool valueized_anything = false;
1651 /* Avoid re-allocation overhead. */
1652 lhs_ops.truncate (0);
1653 copy_reference_ops_from_ref (lhs, &lhs_ops);
1654 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1655 if (valueized_anything)
1657 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1658 get_alias_set (lhs),
1659 TREE_TYPE (lhs), lhs_ops);
1660 if (lhs_ref_ok
1661 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1663 *disambiguate_only = true;
1664 return NULL;
1667 else
1669 ao_ref_init (&lhs_ref, lhs);
1670 lhs_ref_ok = true;
1673 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1674 && gimple_call_num_args (def_stmt) <= 4)
1676 /* For builtin calls valueize its arguments and call the
1677 alias oracle again. Valueization may improve points-to
1678 info of pointers and constify size and position arguments.
1679 Originally this was motivated by PR61034 which has
1680 conditional calls to free falsely clobbering ref because
1681 of imprecise points-to info of the argument. */
1682 tree oldargs[4];
1683 bool valueized_anything = false;
1684 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1686 oldargs[i] = gimple_call_arg (def_stmt, i);
1687 if (TREE_CODE (oldargs[i]) == SSA_NAME
1688 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1690 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1691 valueized_anything = true;
1694 if (valueized_anything)
1696 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1697 ref);
1698 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1699 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1700 if (!res)
1702 *disambiguate_only = true;
1703 return NULL;
1708 if (*disambiguate_only)
1709 return (void *)-1;
1711 offset = ref->offset;
1712 maxsize = ref->max_size;
1714 /* If we cannot constrain the size of the reference we cannot
1715 test if anything kills it. */
1716 if (maxsize == -1)
1717 return (void *)-1;
1719 /* We can't deduce anything useful from clobbers. */
1720 if (gimple_clobber_p (def_stmt))
1721 return (void *)-1;
1723 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1724 from that definition.
1725 1) Memset. */
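  /* For instance (made-up sketch), a scalar load from memory covered by
     a dominating memset (&a, 0, sizeof (a)) can be valueized to the
     zero constant of its type without walking further.  */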
1726 if (is_gimple_reg_type (vr->type)
1727 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1728 && integer_zerop (gimple_call_arg (def_stmt, 1))
1729 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1730 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1732 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1733 tree base2;
1734 HOST_WIDE_INT offset2, size2, maxsize2;
1735 bool reverse;
1736 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1737 &reverse);
1738 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1739 if ((unsigned HOST_WIDE_INT)size2 / 8
1740 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1741 && maxsize2 != -1
1742 && operand_equal_p (base, base2, 0)
1743 && offset2 <= offset
1744 && offset2 + size2 >= offset + maxsize)
1746 tree val = build_zero_cst (vr->type);
1747 return vn_reference_lookup_or_insert_for_pieces
1748 (vuse, vr->set, vr->type, vr->operands, val);
1752 /* 2) Assignment from an empty CONSTRUCTOR. */
1753 else if (is_gimple_reg_type (vr->type)
1754 && gimple_assign_single_p (def_stmt)
1755 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1756 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1758 tree base2;
1759 HOST_WIDE_INT offset2, size2, maxsize2;
1760 bool reverse;
1761 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1762 &offset2, &size2, &maxsize2, &reverse);
1763 if (maxsize2 != -1
1764 && operand_equal_p (base, base2, 0)
1765 && offset2 <= offset
1766 && offset2 + size2 >= offset + maxsize)
1768 tree val = build_zero_cst (vr->type);
1769 return vn_reference_lookup_or_insert_for_pieces
1770 (vuse, vr->set, vr->type, vr->operands, val);
1774 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1775 routines to extract the assigned bits. */
1776 else if (vn_walk_kind == VN_WALKREWRITE
1777 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1778 && ref->size == maxsize
1779 && maxsize % BITS_PER_UNIT == 0
1780 && offset % BITS_PER_UNIT == 0
1781 && is_gimple_reg_type (vr->type)
1782 && !contains_storage_order_barrier_p (vr->operands)
1783 && gimple_assign_single_p (def_stmt)
1784 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1786 tree base2;
1787 HOST_WIDE_INT offset2, size2, maxsize2;
1788 bool reverse;
1789 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1790 &offset2, &size2, &maxsize2, &reverse);
1791 if (!reverse
1792 && maxsize2 != -1
1793 && maxsize2 == size2
1794 && size2 % BITS_PER_UNIT == 0
1795 && offset2 % BITS_PER_UNIT == 0
1796 && operand_equal_p (base, base2, 0)
1797 && offset2 <= offset
1798 && offset2 + size2 >= offset + maxsize)
1800 /* We support up to 512-bit values (for V8DFmode). */
1801 unsigned char buffer[64];
1802 int len;
1804 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1805 buffer, sizeof (buffer));
1806 if (len > 0)
1808 tree val = native_interpret_expr (vr->type,
1809 buffer
1810 + ((offset - offset2)
1811 / BITS_PER_UNIT),
1812 ref->size / BITS_PER_UNIT);
1813 if (val)
1814 return vn_reference_lookup_or_insert_for_pieces
1815 (vuse, vr->set, vr->type, vr->operands, val);
1820 /* 4) Assignment from an SSA name which definition we may be able
1821 to access pieces from. */
1822 else if (ref->size == maxsize
1823 && is_gimple_reg_type (vr->type)
1824 && !contains_storage_order_barrier_p (vr->operands)
1825 && gimple_assign_single_p (def_stmt)
1826 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1828 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1829 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1830 if (is_gimple_assign (def_stmt2)
1831 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1832 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1833 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1835 tree base2;
1836 HOST_WIDE_INT offset2, size2, maxsize2, off;
1837 bool reverse;
1838 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1839 &offset2, &size2, &maxsize2,
1840 &reverse);
1841 off = offset - offset2;
1842 if (!reverse
1843 && maxsize2 != -1
1844 && maxsize2 == size2
1845 && operand_equal_p (base, base2, 0)
1846 && offset2 <= offset
1847 && offset2 + size2 >= offset + maxsize)
1849 tree val = NULL_TREE;
1850 HOST_WIDE_INT elsz
1851 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1852 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1854 if (off == 0)
1855 val = gimple_assign_rhs1 (def_stmt2);
1856 else if (off == elsz)
1857 val = gimple_assign_rhs2 (def_stmt2);
1859 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1860 && off % elsz == 0)
1862 tree ctor = gimple_assign_rhs1 (def_stmt2);
1863 unsigned i = off / elsz;
1864 if (i < CONSTRUCTOR_NELTS (ctor))
1866 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1867 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1869 if (TREE_CODE (TREE_TYPE (elt->value))
1870 != VECTOR_TYPE)
1871 val = elt->value;
1875 if (val)
1876 return vn_reference_lookup_or_insert_for_pieces
1877 (vuse, vr->set, vr->type, vr->operands, val);
1882 /* 5) For aggregate copies translate the reference through them if
1883 the copy kills ref. */
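  /* For instance (made-up sketch), a load of a.x whose VUSE is defined
     by the aggregate copy "a = b;" is rewritten into a load of b.x and
     the walk continues with that translated reference.  */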
1884 else if (vn_walk_kind == VN_WALKREWRITE
1885 && gimple_assign_single_p (def_stmt)
1886 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1887 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1888 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1890 tree base2;
1891 HOST_WIDE_INT maxsize2;
1892 int i, j, k;
1893 auto_vec<vn_reference_op_s> rhs;
1894 vn_reference_op_t vro;
1895 ao_ref r;
1897 if (!lhs_ref_ok)
1898 return (void *)-1;
1900 /* See if the assignment kills REF. */
1901 base2 = ao_ref_base (&lhs_ref);
1902 maxsize2 = lhs_ref.max_size;
1903 if (maxsize2 == -1
1904 || (base != base2
1905 && (TREE_CODE (base) != MEM_REF
1906 || TREE_CODE (base2) != MEM_REF
1907 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1908 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1909 TREE_OPERAND (base2, 1))))
1910 || !stmt_kills_ref_p (def_stmt, ref))
1911 return (void *)-1;
1913 /* Find the common base of ref and the lhs. lhs_ops already
1914 contains valueized operands for the lhs. */
1915 i = vr->operands.length () - 1;
1916 j = lhs_ops.length () - 1;
1917 while (j >= 0 && i >= 0
1918 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1920 i--;
1921 j--;
1924 /* ??? The innermost op should always be a MEM_REF and we already
1925 checked that the assignment to the lhs kills vr. Thus for
1926 aggregate copies using char[] types the vn_reference_op_eq
1927 may fail when comparing types for compatibility. But we really
1928 don't care here - further lookups with the rewritten operands
1929 will simply fail if we messed up types too badly. */
1930 HOST_WIDE_INT extra_off = 0;
1931 if (j == 0 && i >= 0
1932 && lhs_ops[0].opcode == MEM_REF
1933 && lhs_ops[0].off != -1)
1935 if (lhs_ops[0].off == vr->operands[i].off)
1936 i--, j--;
1937 else if (vr->operands[i].opcode == MEM_REF
1938 && vr->operands[i].off != -1)
1940 extra_off = vr->operands[i].off - lhs_ops[0].off;
1941 i--, j--;
1945 /* i now points to the first additional op.
1946 ??? LHS may not be completely contained in VR, one or more
1947 VIEW_CONVERT_EXPRs could be in its way. We could at least
1948 try handling outermost VIEW_CONVERT_EXPRs. */
1949 if (j != -1)
1950 return (void *)-1;
1952 /* Punt if the additional ops contain a storage order barrier. */
1953 for (k = i; k >= 0; k--)
1955 vro = &vr->operands[k];
1956 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1957 return (void *)-1;
1960 /* Now re-write REF to be based on the rhs of the assignment. */
1961 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1963 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1964 if (extra_off != 0)
1966 if (rhs.length () < 2
1967 || rhs[0].opcode != MEM_REF
1968 || rhs[0].off == -1)
1969 return (void *)-1;
1970 rhs[0].off += extra_off;
1971 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1972 build_int_cst (TREE_TYPE (rhs[0].op0),
1973 extra_off));
1976 /* We need to pre-pend vr->operands[0..i] to rhs. */
1977 vec<vn_reference_op_s> old = vr->operands;
1978 if (i + 1 + rhs.length () > vr->operands.length ())
1980 vr->operands.safe_grow (i + 1 + rhs.length ());
1981 if (old == shared_lookup_references)
1982 shared_lookup_references = vr->operands;
1984 else
1985 vr->operands.truncate (i + 1 + rhs.length ());
1986 FOR_EACH_VEC_ELT (rhs, j, vro)
1987 vr->operands[i + 1 + j] = *vro;
1988 vr->operands = valueize_refs (vr->operands);
1989 if (old == shared_lookup_references)
1990 shared_lookup_references = vr->operands;
1991 vr->hashcode = vn_reference_compute_hash (vr);
1993 /* Try folding the new reference to a constant. */
1994 tree val = fully_constant_vn_reference_p (vr);
1995 if (val)
1996 return vn_reference_lookup_or_insert_for_pieces
1997 (vuse, vr->set, vr->type, vr->operands, val);
1999 /* Adjust *ref from the new operands. */
2000 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2001 return (void *)-1;
2002 /* This can happen with bitfields. */
2003 if (ref->size != r.size)
2004 return (void *)-1;
2005 *ref = r;
2007 /* Do not update last seen VUSE after translating. */
2008 last_vuse_ptr = NULL;
2010 /* Keep looking for the adjusted *REF / VR pair. */
2011 return NULL;
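/* For example, given an aggregate copy that kills the looked-up
   reference, as in

     b = a;          <-- def_stmt
     ... = b.f;      <-- reference being looked up

   the operands of VR are rewritten so the lookup continues as if it
   were a load of a.f, with any offset mismatch between the MEM_REF
   bases folded in via EXTRA_OFF.  */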
2014 /* 6) For memcpy copies translate the reference through them if
2015 the copy kills ref. */
2016 else if (vn_walk_kind == VN_WALKREWRITE
2017 && is_gimple_reg_type (vr->type)
2018 /* ??? Handle BCOPY as well. */
2019 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2020 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2021 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2022 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2023 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2024 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2025 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2026 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2028 tree lhs, rhs;
2029 ao_ref r;
2030 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2031 vn_reference_op_s op;
2032 HOST_WIDE_INT at;
2034 /* Only handle non-variable, addressable refs. */
2035 if (ref->size != maxsize
2036 || offset % BITS_PER_UNIT != 0
2037 || ref->size % BITS_PER_UNIT != 0)
2038 return (void *)-1;
2040 /* Extract a pointer base and an offset for the destination. */
2041 lhs = gimple_call_arg (def_stmt, 0);
2042 lhs_offset = 0;
2043 if (TREE_CODE (lhs) == SSA_NAME)
2045 lhs = SSA_VAL (lhs);
2046 if (TREE_CODE (lhs) == SSA_NAME)
2048 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2049 if (gimple_assign_single_p (def_stmt)
2050 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2051 lhs = gimple_assign_rhs1 (def_stmt);
2054 if (TREE_CODE (lhs) == ADDR_EXPR)
2056 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2057 &lhs_offset);
2058 if (!tem)
2059 return (void *)-1;
2060 if (TREE_CODE (tem) == MEM_REF
2061 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2063 lhs = TREE_OPERAND (tem, 0);
2064 if (TREE_CODE (lhs) == SSA_NAME)
2065 lhs = SSA_VAL (lhs);
2066 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2068 else if (DECL_P (tem))
2069 lhs = build_fold_addr_expr (tem);
2070 else
2071 return (void *)-1;
2073 if (TREE_CODE (lhs) != SSA_NAME
2074 && TREE_CODE (lhs) != ADDR_EXPR)
2075 return (void *)-1;
2077 /* Extract a pointer base and an offset for the source. */
2078 rhs = gimple_call_arg (def_stmt, 1);
2079 rhs_offset = 0;
2080 if (TREE_CODE (rhs) == SSA_NAME)
2081 rhs = SSA_VAL (rhs);
2082 if (TREE_CODE (rhs) == ADDR_EXPR)
2084 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2085 &rhs_offset);
2086 if (!tem)
2087 return (void *)-1;
2088 if (TREE_CODE (tem) == MEM_REF
2089 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2091 rhs = TREE_OPERAND (tem, 0);
2092 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2094 else if (DECL_P (tem))
2095 rhs = build_fold_addr_expr (tem);
2096 else
2097 return (void *)-1;
2099 if (TREE_CODE (rhs) != SSA_NAME
2100 && TREE_CODE (rhs) != ADDR_EXPR)
2101 return (void *)-1;
2103 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2105 /* The bases of the destination and the references have to agree. */
2106 if ((TREE_CODE (base) != MEM_REF
2107 && !DECL_P (base))
2108 || (TREE_CODE (base) == MEM_REF
2109 && (TREE_OPERAND (base, 0) != lhs
2110 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2111 || (DECL_P (base)
2112 && (TREE_CODE (lhs) != ADDR_EXPR
2113 || TREE_OPERAND (lhs, 0) != base)))
2114 return (void *)-1;
2116 at = offset / BITS_PER_UNIT;
2117 if (TREE_CODE (base) == MEM_REF)
2118 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2119 /* If the access is completely outside of the memcpy destination
2120 area there is no aliasing. */
2121 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2122 || lhs_offset + copy_size <= at)
2123 return NULL;
2124 /* And the access has to be contained within the memcpy destination. */
2125 if (lhs_offset > at
2126 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2127 return (void *)-1;
2129 /* Make room for 2 operands in the new reference. */
2130 if (vr->operands.length () < 2)
2132 vec<vn_reference_op_s> old = vr->operands;
2133 vr->operands.safe_grow_cleared (2);
2134 if (old == shared_lookup_references
2135 && vr->operands != old)
2136 shared_lookup_references = vr->operands;
2138 else
2139 vr->operands.truncate (2);
2141 /* The looked-through reference is a simple MEM_REF. */
2142 memset (&op, 0, sizeof (op));
2143 op.type = vr->type;
2144 op.opcode = MEM_REF;
2145 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2146 op.off = at - lhs_offset + rhs_offset;
2147 vr->operands[0] = op;
2148 op.type = TREE_TYPE (rhs);
2149 op.opcode = TREE_CODE (rhs);
2150 op.op0 = rhs;
2151 op.off = -1;
2152 vr->operands[1] = op;
2153 vr->hashcode = vn_reference_compute_hash (vr);
2155 /* Adjust *ref from the new operands. */
2156 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2157 return (void *)-1;
2158 /* This can happen with bitfields. */
2159 if (ref->size != r.size)
2160 return (void *)-1;
2161 *ref = r;
2163 /* Do not update last seen VUSE after translating. */
2164 last_vuse_ptr = NULL;
2166 /* Keep looking for the adjusted *REF / VR pair. */
2167 return NULL;
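/* For example, given

     memcpy (&a, &b, sizeof (a));   <-- def_stmt
     ... = a.f;                     <-- reference being looked up

   with the access fully contained in the copied region, VR is rewritten
   to a MEM_REF based on the memcpy source at the corresponding offset,
   so the walk can go on to look for an earlier store to b.f.  */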
2170 /* Bail out and stop walking. */
2171 return (void *)-1;
2174 /* Lookup a reference operation by its parts, in the current hash table.
2175 Returns the resulting value number if it exists in the hash table,
2176 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2177 vn_reference_t stored in the hashtable if something is found. */
2179 tree
2180 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2181 vec<vn_reference_op_s> operands,
2182 vn_reference_t *vnresult, vn_lookup_kind kind)
2184 struct vn_reference_s vr1;
2185 vn_reference_t tmp;
2186 tree cst;
2188 if (!vnresult)
2189 vnresult = &tmp;
2190 *vnresult = NULL;
2192 vr1.vuse = vuse_ssa_val (vuse);
2193 shared_lookup_references.truncate (0);
2194 shared_lookup_references.safe_grow (operands.length ());
2195 memcpy (shared_lookup_references.address (),
2196 operands.address (),
2197 sizeof (vn_reference_op_s)
2198 * operands.length ());
2199 vr1.operands = operands = shared_lookup_references
2200 = valueize_refs (shared_lookup_references);
2201 vr1.type = type;
2202 vr1.set = set;
2203 vr1.hashcode = vn_reference_compute_hash (&vr1);
2204 if ((cst = fully_constant_vn_reference_p (&vr1)))
2205 return cst;
2207 vn_reference_lookup_1 (&vr1, vnresult);
2208 if (!*vnresult
2209 && kind != VN_NOWALK
2210 && vr1.vuse)
2212 ao_ref r;
2213 vn_walk_kind = kind;
2214 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2215 *vnresult =
2216 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2217 vn_reference_lookup_2,
2218 vn_reference_lookup_3,
2219 vuse_ssa_val, &vr1);
2220 gcc_checking_assert (vr1.operands == shared_lookup_references);
2223 if (*vnresult)
2224 return (*vnresult)->result;
2226 return NULL_TREE;
2229 /* Lookup OP in the current hash table, and return the resulting value
2230 number if it exists in the hash table. Return NULL_TREE if it does
2231 not exist in the hash table or if the result field of the structure
2232 was NULL. VNRESULT will be filled in with the vn_reference_t
2233 stored in the hashtable if one exists. When TBAA_P is false assume
2234 we are looking up a store and treat it as having alias-set zero. */
2236 tree
2237 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2238 vn_reference_t *vnresult, bool tbaa_p)
2240 vec<vn_reference_op_s> operands;
2241 struct vn_reference_s vr1;
2242 tree cst;
2243 bool valuezied_anything;
2245 if (vnresult)
2246 *vnresult = NULL;
2248 vr1.vuse = vuse_ssa_val (vuse);
2249 vr1.operands = operands
2250 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2251 vr1.type = TREE_TYPE (op);
2252 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2253 vr1.hashcode = vn_reference_compute_hash (&vr1);
2254 if ((cst = fully_constant_vn_reference_p (&vr1)))
2255 return cst;
2257 if (kind != VN_NOWALK
2258 && vr1.vuse)
2260 vn_reference_t wvnresult;
2261 ao_ref r;
2262 /* Make sure to use a valueized reference if we valueized anything.
2263 Otherwise preserve the full reference for advanced TBAA. */
2264 if (!valuezied_anything
2265 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2266 vr1.operands))
2267 ao_ref_init (&r, op);
2268 if (! tbaa_p)
2269 r.ref_alias_set = r.base_alias_set = 0;
2270 vn_walk_kind = kind;
2271 wvnresult =
2272 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2273 vn_reference_lookup_2,
2274 vn_reference_lookup_3,
2275 vuse_ssa_val, &vr1);
2276 gcc_checking_assert (vr1.operands == shared_lookup_references);
2277 if (wvnresult)
2279 if (vnresult)
2280 *vnresult = wvnresult;
2281 return wvnresult->result;
2284 return NULL_TREE;
2287 return vn_reference_lookup_1 (&vr1, vnresult);
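/* A sketch of typical use, as done when value numbering a load
   x_1 = a.f:

     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt),
                                     VN_WALKREWRITE, NULL, true);

   If VAL is non-NULL the lhs can be value numbered to VAL instead of
   to itself.  */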
2290 /* Lookup CALL in the current hash table and return the entry in
2291 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2293 void
2294 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2295 vn_reference_t vr)
2297 if (vnresult)
2298 *vnresult = NULL;
2300 tree vuse = gimple_vuse (call);
2302 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2303 vr->operands = valueize_shared_reference_ops_from_call (call);
2304 vr->type = gimple_expr_type (call);
2305 vr->set = 0;
2306 vr->hashcode = vn_reference_compute_hash (vr);
2307 vn_reference_lookup_1 (vr, vnresult);
2310 /* Insert OP into the current hash table with a value number of
2311 RESULT, and return the resulting reference structure we created. */
2313 static vn_reference_t
2314 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2316 vn_reference_s **slot;
2317 vn_reference_t vr1;
2318 bool tem;
2320 vr1 = current_info->references_pool->allocate ();
2321 if (TREE_CODE (result) == SSA_NAME)
2322 vr1->value_id = VN_INFO (result)->value_id;
2323 else
2324 vr1->value_id = get_or_alloc_constant_value_id (result);
2325 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2326 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2327 vr1->type = TREE_TYPE (op);
2328 vr1->set = get_alias_set (op);
2329 vr1->hashcode = vn_reference_compute_hash (vr1);
2330 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2331 vr1->result_vdef = vdef;
2333 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2334 INSERT);
2336 /* Because we lookup stores using vuses, and value number failures
2337 using the vdefs (see visit_reference_op_store for how and why),
2338 it's possible that on failure we may try to insert an already
2339 inserted store. This is not wrong, there is no ssa name for a
2340 store that we could use as a differentiator anyway. Thus, unlike
2341 the other lookup functions, you cannot gcc_assert (!*slot)
2342 here. */
2344 /* But free the old slot in case of a collision. */
2345 if (*slot)
2346 free_reference (*slot);
2348 *slot = vr1;
2349 return vr1;
2352 /* Insert a reference by its pieces into the current hash table with
2353 a value number of RESULT. Return the resulting reference
2354 structure we created. */
2356 vn_reference_t
2357 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2358 vec<vn_reference_op_s> operands,
2359 tree result, unsigned int value_id)
2362 vn_reference_s **slot;
2363 vn_reference_t vr1;
2365 vr1 = current_info->references_pool->allocate ();
2366 vr1->value_id = value_id;
2367 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2368 vr1->operands = valueize_refs (operands);
2369 vr1->type = type;
2370 vr1->set = set;
2371 vr1->hashcode = vn_reference_compute_hash (vr1);
2372 if (result && TREE_CODE (result) == SSA_NAME)
2373 result = SSA_VAL (result);
2374 vr1->result = result;
2376 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2377 INSERT);
2379 /* At this point we should have all the things inserted that we have
2380 seen before, and we should never try inserting something that
2381 already exists. */
2382 gcc_assert (!*slot);
2383 if (*slot)
2384 free_reference (*slot);
2386 *slot = vr1;
2387 return vr1;
2390 /* Compute and return the hash value for nary operation VBO1. */
2392 static hashval_t
2393 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2395 inchash::hash hstate;
2396 unsigned i;
2398 for (i = 0; i < vno1->length; ++i)
2399 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2400 vno1->op[i] = SSA_VAL (vno1->op[i]);
2402 if (((vno1->length == 2
2403 && commutative_tree_code (vno1->opcode))
2404 || (vno1->length == 3
2405 && commutative_ternary_tree_code (vno1->opcode)))
2406 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2407 std::swap (vno1->op[0], vno1->op[1]);
2408 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2409 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2411 std::swap (vno1->op[0], vno1->op[1]);
2412 vno1->opcode = swap_tree_comparison (vno1->opcode);
2415 hstate.add_int (vno1->opcode);
2416 for (i = 0; i < vno1->length; ++i)
2417 inchash::add_expr (vno1->op[i], hstate);
2419 return hstate.end ();
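/* The canonicalization above makes, for example, a_1 + b_2 and b_2 + a_1
   hash identically, and likewise a_1 < b_2 and b_2 > a_1 (the opcode is
   replaced by the swapped comparison), so vn_nary_op_eq can treat them
   as the same operation.  */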
2422 /* Compare nary operations VNO1 and VNO2 and return true if they are
2423 equivalent. */
2425 bool
2426 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2428 unsigned i;
2430 if (vno1->hashcode != vno2->hashcode)
2431 return false;
2433 if (vno1->length != vno2->length)
2434 return false;
2436 if (vno1->opcode != vno2->opcode
2437 || !types_compatible_p (vno1->type, vno2->type))
2438 return false;
2440 for (i = 0; i < vno1->length; ++i)
2441 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2442 return false;
2444 return true;
2447 /* Initialize VNO from the pieces provided. */
2449 static void
2450 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2451 enum tree_code code, tree type, tree *ops)
2453 vno->opcode = code;
2454 vno->length = length;
2455 vno->type = type;
2456 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2459 /* Initialize VNO from OP. */
2461 static void
2462 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2464 unsigned i;
2466 vno->opcode = TREE_CODE (op);
2467 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2468 vno->type = TREE_TYPE (op);
2469 for (i = 0; i < vno->length; ++i)
2470 vno->op[i] = TREE_OPERAND (op, i);
2473 /* Return the number of operands for a vn_nary ops structure from STMT. */
2475 static unsigned int
2476 vn_nary_length_from_stmt (gimple *stmt)
2478 switch (gimple_assign_rhs_code (stmt))
2480 case REALPART_EXPR:
2481 case IMAGPART_EXPR:
2482 case VIEW_CONVERT_EXPR:
2483 return 1;
2485 case BIT_FIELD_REF:
2486 return 3;
2488 case CONSTRUCTOR:
2489 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2491 default:
2492 return gimple_num_ops (stmt) - 1;
2496 /* Initialize VNO from STMT. */
2498 static void
2499 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2501 unsigned i;
2503 vno->opcode = gimple_assign_rhs_code (stmt);
2504 vno->type = gimple_expr_type (stmt);
2505 switch (vno->opcode)
2507 case REALPART_EXPR:
2508 case IMAGPART_EXPR:
2509 case VIEW_CONVERT_EXPR:
2510 vno->length = 1;
2511 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2512 break;
2514 case BIT_FIELD_REF:
2515 vno->length = 3;
2516 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2517 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2518 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2519 break;
2521 case CONSTRUCTOR:
2522 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2523 for (i = 0; i < vno->length; ++i)
2524 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2525 break;
2527 default:
2528 gcc_checking_assert (!gimple_assign_single_p (stmt));
2529 vno->length = gimple_num_ops (stmt) - 1;
2530 for (i = 0; i < vno->length; ++i)
2531 vno->op[i] = gimple_op (stmt, i + 1);
2535 /* Compute the hashcode for VNO and look for it in the hash table;
2536 return the resulting value number if it exists in the hash table.
2537 Return NULL_TREE if it does not exist in the hash table or if the
2538 result field of the operation is NULL. VNRESULT will contain the
2539 vn_nary_op_t from the hashtable if it exists. */
2541 static tree
2542 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2544 vn_nary_op_s **slot;
2546 if (vnresult)
2547 *vnresult = NULL;
2549 vno->hashcode = vn_nary_op_compute_hash (vno);
2550 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2551 NO_INSERT);
2552 if (!slot && current_info == optimistic_info)
2553 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2554 NO_INSERT);
2555 if (!slot)
2556 return NULL_TREE;
2557 if (vnresult)
2558 *vnresult = *slot;
2559 return (*slot)->result;
2562 /* Lookup a n-ary operation by its pieces and return the resulting value
2563 number if it exists in the hash table. Return NULL_TREE if it does
2564 not exist in the hash table or if the result field of the operation
2565 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2566 if it exists. */
2568 tree
2569 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2570 tree type, tree *ops, vn_nary_op_t *vnresult)
2572 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2573 sizeof_vn_nary_op (length));
2574 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2575 return vn_nary_op_lookup_1 (vno1, vnresult);
2578 /* Lookup OP in the current hash table, and return the resulting value
2579 number if it exists in the hash table. Return NULL_TREE if it does
2580 not exist in the hash table or if the result field of the operation
2581 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2582 if it exists. */
2584 tree
2585 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2587 vn_nary_op_t vno1
2588 = XALLOCAVAR (struct vn_nary_op_s,
2589 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2590 init_vn_nary_op_from_op (vno1, op);
2591 return vn_nary_op_lookup_1 (vno1, vnresult);
2594 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2595 value number if it exists in the hash table. Return NULL_TREE if
2596 it does not exist in the hash table. VNRESULT will contain the
2597 vn_nary_op_t from the hashtable if it exists. */
2599 tree
2600 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2602 vn_nary_op_t vno1
2603 = XALLOCAVAR (struct vn_nary_op_s,
2604 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2605 init_vn_nary_op_from_stmt (vno1, stmt);
2606 return vn_nary_op_lookup_1 (vno1, vnresult);
2609 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2611 static tree
2612 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2614 if (!rcode.is_tree_code ())
2615 return NULL_TREE;
2616 vn_nary_op_t vnresult = NULL;
2617 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2618 (tree_code) rcode, type, ops, &vnresult);
2621 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2623 static vn_nary_op_t
2624 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2626 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2629 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2630 obstack. */
2632 static vn_nary_op_t
2633 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2635 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2636 &current_info->nary_obstack);
2638 vno1->value_id = value_id;
2639 vno1->length = length;
2640 vno1->result = result;
2642 return vno1;
2645 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2646 VNO->HASHCODE first. */
2648 static vn_nary_op_t
2649 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2650 bool compute_hash)
2652 vn_nary_op_s **slot;
2654 if (compute_hash)
2655 vno->hashcode = vn_nary_op_compute_hash (vno);
2657 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2658 gcc_assert (!*slot);
2660 *slot = vno;
2661 return vno;
2664 /* Insert an n-ary operation into the current hash table using its
2665 pieces. Return the vn_nary_op_t structure we created and put in
2666 the hashtable. */
2668 vn_nary_op_t
2669 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2670 tree type, tree *ops,
2671 tree result, unsigned int value_id)
2673 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2674 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2675 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2678 /* Insert OP into the current hash table with a value number of
2679 RESULT. Return the vn_nary_op_t structure we created and put in
2680 the hashtable. */
2682 vn_nary_op_t
2683 vn_nary_op_insert (tree op, tree result)
2685 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2686 vn_nary_op_t vno1;
2688 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2689 init_vn_nary_op_from_op (vno1, op);
2690 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2693 /* Insert the rhs of STMT into the current hash table with a value number of
2694 RESULT. */
2696 static vn_nary_op_t
2697 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2699 vn_nary_op_t vno1
2700 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2701 result, VN_INFO (result)->value_id);
2702 init_vn_nary_op_from_stmt (vno1, stmt);
2703 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2706 /* Compute a hashcode for PHI operation VP1 and return it. */
2708 static inline hashval_t
2709 vn_phi_compute_hash (vn_phi_t vp1)
2711 inchash::hash hstate (vp1->phiargs.length () > 2
2712 ? vp1->block->index : vp1->phiargs.length ());
2713 tree phi1op;
2714 tree type;
2715 edge e;
2716 edge_iterator ei;
2718 /* If all PHI arguments are constants we need to distinguish
2719 the PHI node via its type. */
2720 type = vp1->type;
2721 hstate.merge_hash (vn_hash_type (type));
2723 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2725 /* Don't hash backedge values; they need to be handled as VN_TOP
2726 for optimistic value-numbering. */
2727 if (e->flags & EDGE_DFS_BACK)
2728 continue;
2730 phi1op = vp1->phiargs[e->dest_idx];
2731 if (phi1op == VN_TOP)
2732 continue;
2733 inchash::add_expr (phi1op, hstate);
2736 return hstate.end ();
2740 /* Return true if COND1 and COND2 represent the same condition, set
2741 *INVERTED_P if one needs to be inverted to make it the same as
2742 the other. */
2744 static bool
2745 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2747 enum tree_code code1 = gimple_cond_code (cond1);
2748 enum tree_code code2 = gimple_cond_code (cond2);
2749 tree lhs1 = gimple_cond_lhs (cond1);
2750 tree lhs2 = gimple_cond_lhs (cond2);
2751 tree rhs1 = gimple_cond_rhs (cond1);
2752 tree rhs2 = gimple_cond_rhs (cond2);
2754 *inverted_p = false;
2755 if (code1 == code2)
2757 else if (code1 == swap_tree_comparison (code2))
2758 std::swap (lhs2, rhs2);
2759 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2760 *inverted_p = true;
2761 else if (code1 == invert_tree_comparison
2762 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2764 std::swap (lhs2, rhs2);
2765 *inverted_p = true;
2767 else
2768 return false;
2770 lhs1 = vn_valueize (lhs1);
2771 rhs1 = vn_valueize (rhs1);
2772 lhs2 = vn_valueize (lhs2);
2773 rhs2 = vn_valueize (rhs2);
2774 return ((expressions_equal_p (lhs1, lhs2)
2775 && expressions_equal_p (rhs1, rhs2))
2776 || (commutative_tree_code (code1)
2777 && expressions_equal_p (lhs1, rhs2)
2778 && expressions_equal_p (rhs1, lhs2)));
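/* For example, "if (a_1 < b_2)" and "if (b_2 > a_1)" compare equal with
   *INVERTED_P false, while "if (a_1 < b_2)" and "if (a_1 >= b_2)" compare
   equal with *INVERTED_P set (provided NaNs do not get in the way).  */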
2781 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2783 static int
2784 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2786 if (vp1->hashcode != vp2->hashcode)
2787 return false;
2789 if (vp1->block != vp2->block)
2791 if (vp1->phiargs.length () != vp2->phiargs.length ())
2792 return false;
2794 switch (vp1->phiargs.length ())
2796 case 1:
2797 /* Single-arg PHIs are just copies. */
2798 break;
2800 case 2:
2802 /* Rule out backedges into the PHI. */
2803 if (vp1->block->loop_father->header == vp1->block
2804 || vp2->block->loop_father->header == vp2->block)
2805 return false;
2807 /* If the PHI nodes do not have compatible types
2808 they are not the same. */
2809 if (!types_compatible_p (vp1->type, vp2->type))
2810 return false;
2812 basic_block idom1
2813 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2814 basic_block idom2
2815 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2816 /* If the immediate dominators end in switch stmts, multiple
2817 values may end up in the same PHI arg via intermediate
2818 CFG merges. */
2819 if (EDGE_COUNT (idom1->succs) != 2
2820 || EDGE_COUNT (idom2->succs) != 2)
2821 return false;
2823 /* Verify the controlling stmt is the same. */
2824 gimple *last1 = last_stmt (idom1);
2825 gimple *last2 = last_stmt (idom2);
2826 if (gimple_code (last1) != GIMPLE_COND
2827 || gimple_code (last2) != GIMPLE_COND)
2828 return false;
2829 bool inverted_p;
2830 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2831 as_a <gcond *> (last2), &inverted_p))
2832 return false;
2834 /* Get at true/false controlled edges into the PHI. */
2835 edge te1, te2, fe1, fe2;
2836 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2837 &te1, &fe1)
2838 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2839 &te2, &fe2))
2840 return false;
2842 /* Swap edges if the second condition is the inverted of the
2843 first. */
2844 if (inverted_p)
2845 std::swap (te2, fe2);
2847 /* ??? Handle VN_TOP specially. */
2848 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2849 vp2->phiargs[te2->dest_idx])
2850 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2851 vp2->phiargs[fe2->dest_idx]))
2852 return false;
2854 return true;
2857 default:
2858 return false;
2862 /* If the PHI nodes do not have compatible types
2863 they are not the same. */
2864 if (!types_compatible_p (vp1->type, vp2->type))
2865 return false;
2867 /* Any phi in the same block will have its arguments in the
2868 same edge order, because of how we store phi nodes. */
2869 int i;
2870 tree phi1op;
2871 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2873 tree phi2op = vp2->phiargs[i];
2874 if (phi1op == VN_TOP || phi2op == VN_TOP)
2875 continue;
2876 if (!expressions_equal_p (phi1op, phi2op))
2877 return false;
2880 return true;
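/* The two-argument case above allows CSEing PHIs of equivalent CFG
   diamonds, e.g.

     if (a_1 < b_2) ...              if (b_2 > a_1) ...
     x_3 = PHI <c_4, d_5>            y_6 = PHI <c_4, d_5>

   provided the true/false controlled edges feed the corresponding
   arguments and neither PHI sits in a loop header.  */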
2883 static vec<tree> shared_lookup_phiargs;
2885 /* Lookup PHI in the current hash table, and return the resulting
2886 value number if it exists in the hash table. Return NULL_TREE if
2887 it does not exist in the hash table. */
2889 static tree
2890 vn_phi_lookup (gimple *phi)
2892 vn_phi_s **slot;
2893 struct vn_phi_s vp1;
2894 edge e;
2895 edge_iterator ei;
2897 shared_lookup_phiargs.truncate (0);
2898 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2900 /* Canonicalize the SSA_NAME's to their value number. */
2901 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2903 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2904 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2905 shared_lookup_phiargs[e->dest_idx] = def;
2907 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2908 vp1.phiargs = shared_lookup_phiargs;
2909 vp1.block = gimple_bb (phi);
2910 vp1.hashcode = vn_phi_compute_hash (&vp1);
2911 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2912 NO_INSERT);
2913 if (!slot && current_info == optimistic_info)
2914 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2915 NO_INSERT);
2916 if (!slot)
2917 return NULL_TREE;
2918 return (*slot)->result;
2921 /* Insert PHI into the current hash table with a value number of
2922 RESULT. */
2924 static vn_phi_t
2925 vn_phi_insert (gimple *phi, tree result)
2927 vn_phi_s **slot;
2928 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2929 vec<tree> args = vNULL;
2930 edge e;
2931 edge_iterator ei;
2933 args.safe_grow (gimple_phi_num_args (phi));
2935 /* Canonicalize the SSA_NAME's to their value number. */
2936 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2938 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2939 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2940 args[e->dest_idx] = def;
2942 vp1->value_id = VN_INFO (result)->value_id;
2943 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2944 vp1->phiargs = args;
2945 vp1->block = gimple_bb (phi);
2946 vp1->result = result;
2947 vp1->hashcode = vn_phi_compute_hash (vp1);
2949 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2951 /* Because we iterate over phi operations more than once, it's
2952 possible the slot might already exist here, hence no assert. */
2953 *slot = vp1;
2954 return vp1;
2958 /* Print set of components in strongly connected component SCC to OUT. */
2960 static void
2961 print_scc (FILE *out, vec<tree> scc)
2963 tree var;
2964 unsigned int i;
2966 fprintf (out, "SCC consists of:");
2967 FOR_EACH_VEC_ELT (scc, i, var)
2969 fprintf (out, " ");
2970 print_generic_expr (out, var, 0);
2972 fprintf (out, "\n");
2975 /* Return true if BB1 is dominated by BB2 taking into account edges
2976 that are not executable. */
2978 static bool
2979 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
2981 edge_iterator ei;
2982 edge e;
2984 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
2985 return true;
2987 /* Before iterating we'd like to know if there exists a
2988 (executable) path from bb2 to bb1 at all; if not we can
2989 directly return false. For now simply iterate once. */
2991 /* Iterate to the single executable bb1 predecessor. */
2992 if (EDGE_COUNT (bb1->preds) > 1)
2994 edge prede = NULL;
2995 FOR_EACH_EDGE (e, ei, bb1->preds)
2996 if (e->flags & EDGE_EXECUTABLE)
2998 if (prede)
3000 prede = NULL;
3001 break;
3003 prede = e;
3005 if (prede)
3007 bb1 = prede->src;
3009 /* Re-do the dominance check with changed bb1. */
3010 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3011 return true;
3015 /* Iterate to the single executable bb2 successor. */
3016 edge succe = NULL;
3017 FOR_EACH_EDGE (e, ei, bb2->succs)
3018 if (e->flags & EDGE_EXECUTABLE)
3020 if (succe)
3022 succe = NULL;
3023 break;
3025 succe = e;
3027 if (succe)
3029 /* Verify the reached block is only reached through succe.
3030 If there is only one edge we can spare us the dominator
3031 check and iterate directly. */
3032 if (EDGE_COUNT (succe->dest->preds) > 1)
3034 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3035 if (e != succe
3036 && (e->flags & EDGE_EXECUTABLE))
3038 succe = NULL;
3039 break;
3042 if (succe)
3044 bb2 = succe->dest;
3046 /* Re-do the dominance check with changed bb2. */
3047 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3048 return true;
3052 /* We could now iterate updating bb1 / bb2. */
3053 return false;
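/* For example, when BB2 ends in a conditional of which only one outgoing
   edge is marked EDGE_EXECUTABLE, its sole executable successor is treated
   as dominated by BB2, extending the plain CDI_DOMINATORS query by one
   step on either side.  */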
3056 /* Set the value number of FROM to TO, return true if it has changed
3057 as a result. */
3059 static inline bool
3060 set_ssa_val_to (tree from, tree to)
3062 tree currval = SSA_VAL (from);
3063 HOST_WIDE_INT toff, coff;
3065 /* The only thing we allow as value numbers are ssa_names
3066 and invariants. So assert that here. We don't allow VN_TOP
3067 as visiting a stmt should produce a value-number other than
3068 that.
3069 ??? Still VN_TOP can happen for unreachable code, so force
3070 it to varying in that case. Not all code is prepared to
3071 get VN_TOP on valueization. */
3072 if (to == VN_TOP)
3074 if (dump_file && (dump_flags & TDF_DETAILS))
3075 fprintf (dump_file, "Forcing value number to varying on "
3076 "receiving VN_TOP\n");
3077 to = from;
3080 gcc_assert (to != NULL_TREE
3081 && ((TREE_CODE (to) == SSA_NAME
3082 && (to == from || SSA_VAL (to) == to))
3083 || is_gimple_min_invariant (to)));
3085 if (from != to)
3087 if (currval == from)
3089 if (dump_file && (dump_flags & TDF_DETAILS))
3091 fprintf (dump_file, "Not changing value number of ");
3092 print_generic_expr (dump_file, from, 0);
3093 fprintf (dump_file, " from VARYING to ");
3094 print_generic_expr (dump_file, to, 0);
3095 fprintf (dump_file, "\n");
3097 return false;
3099 else if (TREE_CODE (to) == SSA_NAME
3100 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3101 to = from;
3104 if (dump_file && (dump_flags & TDF_DETAILS))
3106 fprintf (dump_file, "Setting value number of ");
3107 print_generic_expr (dump_file, from, 0);
3108 fprintf (dump_file, " to ");
3109 print_generic_expr (dump_file, to, 0);
3112 if (currval != to
3113 && !operand_equal_p (currval, to, 0)
3114 /* ??? For addresses involving volatile objects or types operand_equal_p
3115 does not reliably detect ADDR_EXPRs as equal. We know we are only
3116 getting invariant gimple addresses here, so can use
3117 get_addr_base_and_unit_offset to do this comparison. */
3118 && !(TREE_CODE (currval) == ADDR_EXPR
3119 && TREE_CODE (to) == ADDR_EXPR
3120 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3121 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3122 && coff == toff))
3124 /* If we equate two SSA names we have to make the side-band info
3125 of the leader conservative (and remember whatever original value
3126 was present). */
3127 if (TREE_CODE (to) == SSA_NAME)
3129 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3130 && SSA_NAME_RANGE_INFO (to))
3132 if (SSA_NAME_IS_DEFAULT_DEF (to)
3133 || dominated_by_p_w_unex
3134 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3135 gimple_bb (SSA_NAME_DEF_STMT (to))))
3136 /* Keep the info from the dominator. */
3138 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3139 || dominated_by_p_w_unex
3140 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3141 gimple_bb (SSA_NAME_DEF_STMT (from))))
3143 /* Save old info. */
3144 if (! VN_INFO (to)->info.range_info)
3146 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3147 VN_INFO (to)->range_info_anti_range_p
3148 = SSA_NAME_ANTI_RANGE_P (to);
3150 /* Use that from the dominator. */
3151 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3152 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3154 else
3156 /* Save old info. */
3157 if (! VN_INFO (to)->info.range_info)
3159 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3160 VN_INFO (to)->range_info_anti_range_p
3161 = SSA_NAME_ANTI_RANGE_P (to);
3163 /* Rather than allocating memory and unioning the info
3164 just clear it. */
3165 SSA_NAME_RANGE_INFO (to) = NULL;
3168 else if (POINTER_TYPE_P (TREE_TYPE (to))
3169 && SSA_NAME_PTR_INFO (to))
3171 if (SSA_NAME_IS_DEFAULT_DEF (to)
3172 || dominated_by_p_w_unex
3173 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3174 gimple_bb (SSA_NAME_DEF_STMT (to))))
3175 /* Keep the info from the dominator. */
3177 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3178 || dominated_by_p_w_unex
3179 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3180 gimple_bb (SSA_NAME_DEF_STMT (from))))
3182 /* Save old info. */
3183 if (! VN_INFO (to)->info.ptr_info)
3184 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3185 /* Use that from the dominator. */
3186 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3188 else if (! SSA_NAME_PTR_INFO (from)
3189 /* Handle the case of trivially equivalent info. */
3190 || memcmp (SSA_NAME_PTR_INFO (to),
3191 SSA_NAME_PTR_INFO (from),
3192 sizeof (ptr_info_def)) != 0)
3194 /* Save old info. */
3195 if (! VN_INFO (to)->info.ptr_info)
3196 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3197 /* Rather than allocating memory and unioning the info
3198 just clear it. */
3199 SSA_NAME_PTR_INFO (to) = NULL;
3204 VN_INFO (from)->valnum = to;
3205 if (dump_file && (dump_flags & TDF_DETAILS))
3206 fprintf (dump_file, " (changed)\n");
3207 return true;
3209 if (dump_file && (dump_flags & TDF_DETAILS))
3210 fprintf (dump_file, "\n");
3211 return false;
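/* For the side-band handling above: when x_2 is value numbered to y_1 and
   y_1's definition dominates x_2's, y_1 keeps its own range/points-to
   info; when x_2's definition dominates y_1's, y_1 takes x_2's info after
   saving its own; otherwise the info is saved and simply cleared, which is
   conservative but avoids unioning the two.  */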
3214 /* Mark as processed all the definitions in the defining stmt of USE, or
3215 the USE itself. */
3217 static void
3218 mark_use_processed (tree use)
3220 ssa_op_iter iter;
3221 def_operand_p defp;
3222 gimple *stmt = SSA_NAME_DEF_STMT (use);
3224 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3226 VN_INFO (use)->use_processed = true;
3227 return;
3230 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3232 tree def = DEF_FROM_PTR (defp);
3234 VN_INFO (def)->use_processed = true;
3238 /* Set all definitions in STMT to value number to themselves.
3239 Return true if a value number changed. */
3241 static bool
3242 defs_to_varying (gimple *stmt)
3244 bool changed = false;
3245 ssa_op_iter iter;
3246 def_operand_p defp;
3248 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3250 tree def = DEF_FROM_PTR (defp);
3251 changed |= set_ssa_val_to (def, def);
3253 return changed;
3256 /* Visit a copy between LHS and RHS, return true if the value number
3257 changed. */
3259 static bool
3260 visit_copy (tree lhs, tree rhs)
3262 /* Valueize. */
3263 rhs = SSA_VAL (rhs);
3265 return set_ssa_val_to (lhs, rhs);
3268 /* Visit a nary operator RHS, value number it, and return true if the
3269 value number of LHS has changed as a result. */
3271 static bool
3272 visit_nary_op (tree lhs, gimple *stmt)
3274 bool changed = false;
3275 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3277 if (result)
3278 changed = set_ssa_val_to (lhs, result);
3279 else
3281 changed = set_ssa_val_to (lhs, lhs);
3282 vn_nary_op_insert_stmt (stmt, lhs);
3285 return changed;
3288 /* Visit a call STMT storing into LHS. Return true if the value number
3289 of the LHS has changed as a result. */
3291 static bool
3292 visit_reference_op_call (tree lhs, gcall *stmt)
3294 bool changed = false;
3295 struct vn_reference_s vr1;
3296 vn_reference_t vnresult = NULL;
3297 tree vdef = gimple_vdef (stmt);
3299 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3300 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3301 lhs = NULL_TREE;
3303 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3304 if (vnresult)
3306 if (vnresult->result_vdef && vdef)
3307 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3309 if (!vnresult->result && lhs)
3310 vnresult->result = lhs;
3312 if (vnresult->result && lhs)
3313 changed |= set_ssa_val_to (lhs, vnresult->result);
3315 else
3317 vn_reference_t vr2;
3318 vn_reference_s **slot;
3319 if (vdef)
3320 changed |= set_ssa_val_to (vdef, vdef);
3321 if (lhs)
3322 changed |= set_ssa_val_to (lhs, lhs);
3323 vr2 = current_info->references_pool->allocate ();
3324 vr2->vuse = vr1.vuse;
3325 /* As we are not walking the virtual operand chain we know the
3326 shared_lookup_references are still original so we can re-use
3327 them here. */
3328 vr2->operands = vr1.operands.copy ();
3329 vr2->type = vr1.type;
3330 vr2->set = vr1.set;
3331 vr2->hashcode = vr1.hashcode;
3332 vr2->result = lhs;
3333 vr2->result_vdef = vdef;
3334 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3335 INSERT);
3336 gcc_assert (!*slot);
3337 *slot = vr2;
3340 return changed;
3343 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3344 and return true if the value number of the LHS has changed as a result. */
3346 static bool
3347 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3349 bool changed = false;
3350 tree last_vuse;
3351 tree result;
3353 last_vuse = gimple_vuse (stmt);
3354 last_vuse_ptr = &last_vuse;
3355 result = vn_reference_lookup (op, gimple_vuse (stmt),
3356 default_vn_walk_kind, NULL, true);
3357 last_vuse_ptr = NULL;
3359 /* We handle type-punning through unions by value-numbering based
3360 on offset and size of the access. Be prepared to handle a
3361 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3362 if (result
3363 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3365 /* We will be setting the value number of lhs to the value number
3366 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3367 So first simplify and lookup this expression to see if it
3368 is already available. */
3369 mprts_hook = vn_lookup_simplify_result;
3370 code_helper rcode = VIEW_CONVERT_EXPR;
3371 tree ops[3] = { result };
3372 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3373 vn_valueize);
3374 mprts_hook = NULL;
3375 gimple *new_stmt = NULL;
3376 if (res
3377 && gimple_simplified_result_is_gimple_val (rcode, ops))
3378 /* The expression is already available. */
3379 result = ops[0];
3380 else
3382 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3383 if (!val)
3385 gimple_seq stmts = NULL;
3386 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3387 &stmts);
3388 if (result)
3390 gcc_assert (gimple_seq_singleton_p (stmts));
3391 new_stmt = gimple_seq_first_stmt (stmts);
3394 else
3395 /* The expression is already available. */
3396 result = val;
3398 if (new_stmt)
3400 /* The expression is not yet available, value-number lhs to
3401 the new SSA_NAME we created. */
3402 /* Initialize value-number information properly. */
3403 VN_INFO_GET (result)->valnum = result;
3404 VN_INFO (result)->value_id = get_next_value_id ();
3405 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3406 new_stmt);
3407 VN_INFO (result)->needs_insertion = true;
3408 /* As all "inserted" statements are singleton SCCs, insert
3409 to the valid table. This is strictly needed to
3410 avoid re-generating new value SSA_NAMEs for the same
3411 expression during SCC iteration over and over (the
3412 optimistic table gets cleared after each iteration).
3413 We do not need to insert into the optimistic table, as
3414 lookups there will fall back to the valid table. */
3415 if (current_info == optimistic_info)
3417 current_info = valid_info;
3418 vn_nary_op_insert_stmt (new_stmt, result);
3419 current_info = optimistic_info;
3421 else
3422 vn_nary_op_insert_stmt (new_stmt, result);
3423 if (dump_file && (dump_flags & TDF_DETAILS))
3425 fprintf (dump_file, "Inserting name ");
3426 print_generic_expr (dump_file, result, 0);
3427 fprintf (dump_file, " for expression ");
3428 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3429 fprintf (dump_file, "\n");
3434 if (result)
3435 changed = set_ssa_val_to (lhs, result);
3436 else
3438 changed = set_ssa_val_to (lhs, lhs);
3439 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3442 return changed;
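/* For instance, with type punning through a union,

     u.f = f_1;
     i_2 = u.i;

   the lookup for u.i can return f_1 (same offset and size), and the type
   mismatch is handled by value numbering i_2 to
   VIEW_CONVERT_EXPR <int> (f_1), inserting a new SSA name for that
   expression if it is not already available.  */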
3446 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3447 and return true if the value number of the LHS has changed as a result. */
3449 static bool
3450 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3452 bool changed = false;
3453 vn_reference_t vnresult = NULL;
3454 tree result, assign;
3455 bool resultsame = false;
3456 tree vuse = gimple_vuse (stmt);
3457 tree vdef = gimple_vdef (stmt);
3459 if (TREE_CODE (op) == SSA_NAME)
3460 op = SSA_VAL (op);
3462 /* First we want to lookup using the *vuses* from the store and see
3463 whether the last store to this location with the same address
3464 had the same value.
3466 The vuses represent the memory state before the store. If the
3467 memory state, address, and value of the store are the same as the
3468 last store to this location, then this store will produce the
3469 same memory state as that store.
3471 In this case the vdef versions for this store are value numbered to those
3472 vuse versions, since they represent the same memory state after
3473 this store.
3475 Otherwise, the vdefs for the store are used when inserting into
3476 the table, since the store generates a new memory state. */
3478 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3480 if (result)
3482 if (TREE_CODE (result) == SSA_NAME)
3483 result = SSA_VAL (result);
3484 resultsame = expressions_equal_p (result, op);
3487 if ((!result || !resultsame)
3488 /* Only perform the following when being called from PRE
3489 which embeds tail merging. */
3490 && default_vn_walk_kind == VN_WALK)
3492 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3493 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3494 if (vnresult)
3496 VN_INFO (vdef)->use_processed = true;
3497 return set_ssa_val_to (vdef, vnresult->result_vdef);
3501 if (!result || !resultsame)
3503 if (dump_file && (dump_flags & TDF_DETAILS))
3505 fprintf (dump_file, "No store match\n");
3506 fprintf (dump_file, "Value numbering store ");
3507 print_generic_expr (dump_file, lhs, 0);
3508 fprintf (dump_file, " to ");
3509 print_generic_expr (dump_file, op, 0);
3510 fprintf (dump_file, "\n");
3512 /* Have to set value numbers before insert, since insert is
3513 going to valueize the references in-place. */
3514 if (vdef)
3516 changed |= set_ssa_val_to (vdef, vdef);
3519 /* Do not insert structure copies into the tables. */
3520 if (is_gimple_min_invariant (op)
3521 || is_gimple_reg (op))
3522 vn_reference_insert (lhs, op, vdef, NULL);
3524 /* Only perform the following when being called from PRE
3525 which embeds tail merging. */
3526 if (default_vn_walk_kind == VN_WALK)
3528 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3529 vn_reference_insert (assign, lhs, vuse, vdef);
3532 else
3534 /* We had a match, so value number the vdef to have the value
3535 number of the vuse it came from. */
3537 if (dump_file && (dump_flags & TDF_DETAILS))
3538 fprintf (dump_file, "Store matched earlier value, "
3539 "value numbering store vdefs to matching vuses.\n");
3541 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3544 return changed;
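/* For example, in

     a.f = x_1;    <-- earlier store, vdef .MEM_4
     a.f = x_1;    <-- this store, vuse .MEM_4, vdef .MEM_5

   the lookup with the vuse finds the earlier store of the same value, so
   .MEM_5 is value numbered to .MEM_4 and the second store adds nothing to
   the memory state.  */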
3547 /* Visit and value number PHI, return true if the value number
3548 changed. */
3550 static bool
3551 visit_phi (gimple *phi)
3553 bool changed = false;
3554 tree result;
3555 tree sameval = VN_TOP;
3556 bool allsame = true;
3557 unsigned n_executable = 0;
3559 /* TODO: We could check for this in init_sccvn, and replace this
3560 with a gcc_assert. */
3561 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3562 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3564 /* See if all non-TOP arguments have the same value. TOP is
3565 equivalent to everything, so we can ignore it. */
3566 edge_iterator ei;
3567 edge e;
3568 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3569 if (e->flags & EDGE_EXECUTABLE)
3571 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3573 ++n_executable;
3574 if (TREE_CODE (def) == SSA_NAME)
3575 def = SSA_VAL (def);
3576 if (def == VN_TOP)
3577 continue;
3578 if (sameval == VN_TOP)
3579 sameval = def;
3580 else if (!expressions_equal_p (def, sameval))
3582 allsame = false;
3583 break;
3587 /* If none of the edges was executable or all incoming values are
3588 undefined, keep the value-number at VN_TOP. If only a single edge
3589 is executable, use its value. */
3590 if (sameval == VN_TOP
3591 || n_executable == 1)
3592 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3594 /* First see if it is equivalent to a phi node in this block. We prefer
3595 this as it allows IV elimination - see PRs 66502 and 67167. */
3596 result = vn_phi_lookup (phi);
3597 if (result)
3598 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3599 /* Otherwise all value numbered to the same value, the phi node has that
3600 value. */
3601 else if (allsame)
3602 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3603 else
3605 vn_phi_insert (phi, PHI_RESULT (phi));
3606 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3609 return changed;
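/* E.g. for x_4 = PHI <y_2(3), y_2(4)> with both incoming edges executable,
   all arguments value number to the same thing, so x_4 gets that value;
   if the arguments disagree, the PHI is instead looked up in (and possibly
   inserted into) the PHI table.  */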
3612 /* Try to simplify RHS using equivalences and constant folding. */
3614 static tree
3615 try_to_simplify (gassign *stmt)
3617 enum tree_code code = gimple_assign_rhs_code (stmt);
3618 tree tem;
3620 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3621 in this case, there is no point in doing extra work. */
3622 if (code == SSA_NAME)
3623 return NULL_TREE;
3625 /* First try constant folding based on our current lattice. */
3626 mprts_hook = vn_lookup_simplify_result;
3627 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3628 mprts_hook = NULL;
3629 if (tem
3630 && (TREE_CODE (tem) == SSA_NAME
3631 || is_gimple_min_invariant (tem)))
3632 return tem;
3634 return NULL_TREE;
3637 /* Visit and value number USE, return true if the value number
3638 changed. */
3640 static bool
3641 visit_use (tree use)
3643 bool changed = false;
3644 gimple *stmt = SSA_NAME_DEF_STMT (use);
3646 mark_use_processed (use);
3648 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3649 if (dump_file && (dump_flags & TDF_DETAILS)
3650 && !SSA_NAME_IS_DEFAULT_DEF (use))
3652 fprintf (dump_file, "Value numbering ");
3653 print_generic_expr (dump_file, use, 0);
3654 fprintf (dump_file, " stmt = ");
3655 print_gimple_stmt (dump_file, stmt, 0, 0);
3658 /* Handle uninitialized uses. */
3659 if (SSA_NAME_IS_DEFAULT_DEF (use))
3660 changed = set_ssa_val_to (use, use);
3661 else if (gimple_code (stmt) == GIMPLE_PHI)
3662 changed = visit_phi (stmt);
3663 else if (gimple_has_volatile_ops (stmt))
3664 changed = defs_to_varying (stmt);
3665 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3667 enum tree_code code = gimple_assign_rhs_code (ass);
3668 tree lhs = gimple_assign_lhs (ass);
3669 tree rhs1 = gimple_assign_rhs1 (ass);
3670 tree simplified;
3672 /* Shortcut for copies. Simplifying copies is pointless,
3673 since we copy the expression and value they represent. */
3674 if (code == SSA_NAME
3675 && TREE_CODE (lhs) == SSA_NAME)
3677 changed = visit_copy (lhs, rhs1);
3678 goto done;
3680 simplified = try_to_simplify (ass);
3681 if (simplified)
3683 if (dump_file && (dump_flags & TDF_DETAILS))
3685 fprintf (dump_file, "RHS ");
3686 print_gimple_expr (dump_file, ass, 0, 0);
3687 fprintf (dump_file, " simplified to ");
3688 print_generic_expr (dump_file, simplified, 0);
3689 fprintf (dump_file, "\n");
3692 /* Setting value numbers to constants will occasionally
3693 screw up phi congruence because constants are not
3694 uniquely associated with a single ssa name that can be
3695 looked up. */
3696 if (simplified
3697 && is_gimple_min_invariant (simplified)
3698 && TREE_CODE (lhs) == SSA_NAME)
3700 changed = set_ssa_val_to (lhs, simplified);
3701 goto done;
3703 else if (simplified
3704 && TREE_CODE (simplified) == SSA_NAME
3705 && TREE_CODE (lhs) == SSA_NAME)
3707 changed = visit_copy (lhs, simplified);
3708 goto done;
3711 if ((TREE_CODE (lhs) == SSA_NAME
3712 /* We can substitute SSA_NAMEs that are live over
3713 abnormal edges with their constant value. */
3714 && !(gimple_assign_copy_p (ass)
3715 && is_gimple_min_invariant (rhs1))
3716 && !(simplified
3717 && is_gimple_min_invariant (simplified))
3718 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3719 /* Stores or copies from SSA_NAMEs that are live over
3720 abnormal edges are a problem. */
3721 || (code == SSA_NAME
3722 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3723 changed = defs_to_varying (ass);
3724 else if (REFERENCE_CLASS_P (lhs)
3725 || DECL_P (lhs))
3726 changed = visit_reference_op_store (lhs, rhs1, ass);
3727 else if (TREE_CODE (lhs) == SSA_NAME)
3729 if ((gimple_assign_copy_p (ass)
3730 && is_gimple_min_invariant (rhs1))
3731 || (simplified
3732 && is_gimple_min_invariant (simplified)))
3734 if (simplified)
3735 changed = set_ssa_val_to (lhs, simplified);
3736 else
3737 changed = set_ssa_val_to (lhs, rhs1);
3739 else
3741 /* Visit the original statement. */
3742 switch (vn_get_stmt_kind (ass))
3744 case VN_NARY:
3745 changed = visit_nary_op (lhs, ass);
3746 break;
3747 case VN_REFERENCE:
3748 changed = visit_reference_op_load (lhs, rhs1, ass);
3749 break;
3750 default:
3751 changed = defs_to_varying (ass);
3752 break;
3756 else
3757 changed = defs_to_varying (ass);
3759 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3761 tree lhs = gimple_call_lhs (call_stmt);
3762 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3764 /* Try constant folding based on our current lattice. */
3765 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3766 vn_valueize);
3767 if (simplified)
3769 if (dump_file && (dump_flags & TDF_DETAILS))
3771 fprintf (dump_file, "call ");
3772 print_gimple_expr (dump_file, call_stmt, 0, 0);
3773 fprintf (dump_file, " simplified to ");
3774 print_generic_expr (dump_file, simplified, 0);
3775 fprintf (dump_file, "\n");
3778 /* Setting value numbers to constants will occasionally
3779 screw up phi congruence because constants are not
3780 uniquely associated with a single ssa name that can be
3781 looked up. */
3782 if (simplified
3783 && is_gimple_min_invariant (simplified))
3785 changed = set_ssa_val_to (lhs, simplified);
3786 if (gimple_vdef (call_stmt))
3787 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3788 SSA_VAL (gimple_vuse (call_stmt)));
3789 goto done;
3791 else if (simplified
3792 && TREE_CODE (simplified) == SSA_NAME)
3794 changed = visit_copy (lhs, simplified);
3795 if (gimple_vdef (call_stmt))
3796 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3797 SSA_VAL (gimple_vuse (call_stmt)));
3798 goto done;
3800 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3802 changed = defs_to_varying (call_stmt);
3803 goto done;
3807 if (!gimple_call_internal_p (call_stmt)
3808 && (/* Calls to the same function with the same vuse
3809 and the same operands do not necessarily return the same
3810 value, unless they're pure or const. */
3811 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3812 /* If calls have a vdef, subsequent calls won't have
3813 the same incoming vuse. So, if 2 calls with vdef have the
3814 same vuse, we know they're not subsequent.
3815 We can value number 2 calls to the same function with the
3816 same vuse and the same operands, which are not subsequent,
3817 the same, because there is no code in the program that can
3818 compare the 2 values... */
3819 || (gimple_vdef (call_stmt)
3820 /* ... unless the call returns a pointer which does
3821 not alias with anything else. In which case the
3822 information that the values are distinct are encoded
3823 in the IL. */
3824 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3825 /* Only perform the following when being called from PRE
3826 which embeds tail merging. */
3827 && default_vn_walk_kind == VN_WALK)))
3828 changed = visit_reference_op_call (lhs, call_stmt);
3829 else
3830 changed = defs_to_varying (call_stmt);
3832 else
3833 changed = defs_to_varying (stmt);
3834 done:
3835 return changed;
3838 /* Compare two operands by reverse postorder index */
3840 static int
3841 compare_ops (const void *pa, const void *pb)
3843 const tree opa = *((const tree *)pa);
3844 const tree opb = *((const tree *)pb);
3845 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3846 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3847 basic_block bba;
3848 basic_block bbb;
3850 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3851 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3852 else if (gimple_nop_p (opstmta))
3853 return -1;
3854 else if (gimple_nop_p (opstmtb))
3855 return 1;
3857 bba = gimple_bb (opstmta);
3858 bbb = gimple_bb (opstmtb);
3860 if (!bba && !bbb)
3861 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3862 else if (!bba)
3863 return -1;
3864 else if (!bbb)
3865 return 1;
3867 if (bba == bbb)
3869 if (gimple_code (opstmta) == GIMPLE_PHI
3870 && gimple_code (opstmtb) == GIMPLE_PHI)
3871 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3872 else if (gimple_code (opstmta) == GIMPLE_PHI)
3873 return -1;
3874 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3875 return 1;
3876 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3877 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3878 else
3879 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3881 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3884 /* Sort an array containing members of a strongly connected component
3885 SCC so that the members are ordered by RPO number.
3886 This means that when the sort is complete, iterating through the
3887 array will give you the members in RPO order. */
3889 static void
3890 sort_scc (vec<tree> scc)
3892 scc.qsort (compare_ops);
3895 /* Insert the no longer used nary ONARY to the hash INFO. */
3897 static void
3898 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3900 size_t size = sizeof_vn_nary_op (onary->length);
3901 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3902 &info->nary_obstack);
3903 memcpy (nary, onary, size);
3904 vn_nary_op_insert_into (nary, info->nary, false);
3907 /* Insert the no longer used phi OPHI to the hash INFO. */
3909 static void
3910 copy_phi (vn_phi_t ophi, vn_tables_t info)
3912 vn_phi_t phi = info->phis_pool->allocate ();
3913 vn_phi_s **slot;
3914 memcpy (phi, ophi, sizeof (*phi));
3915 ophi->phiargs.create (0);
3916 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3917 gcc_assert (!*slot);
3918 *slot = phi;
3921 /* Insert the no-longer-used reference OREF into the hash tables of INFO. */
3923 static void
3924 copy_reference (vn_reference_t oref, vn_tables_t info)
3926 vn_reference_t ref;
3927 vn_reference_s **slot;
3928 ref = info->references_pool->allocate ();
3929 memcpy (ref, oref, sizeof (*ref));
3930 oref->operands.create (0);
3931 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3932 if (*slot)
3933 free_reference (*slot);
3934 *slot = ref;
3937 /* Process a strongly connected component in the SSA graph. */
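/* For example (illustration only), for a loop counter
       i_1 = PHI <0(preheader), i_2(latch)>
       i_2 = i_1 + 1;
   the names i_1 and i_2 depend on each other and are processed
   together as one SCC.  */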
3939 static void
3940 process_scc (vec<tree> scc)
3942 tree var;
3943 unsigned int i;
3944 unsigned int iterations = 0;
3945 bool changed = true;
3946 vn_nary_op_iterator_type hin;
3947 vn_phi_iterator_type hip;
3948 vn_reference_iterator_type hir;
3949 vn_nary_op_t nary;
3950 vn_phi_t phi;
3951 vn_reference_t ref;
3953 /* If the SCC has a single member, just visit it. */
3954 if (scc.length () == 1)
3956 tree use = scc[0];
3957 if (VN_INFO (use)->use_processed)
3958 return;
3959 /* We need to make sure it doesn't form a cycle itself, which can
3960 happen for self-referential PHI nodes. In that case we would
3961 end up inserting an expression with VN_TOP operands into the
3962 valid table which makes us derive bogus equivalences later.
3963 The cheapest way to check this is to assume it for all PHI nodes. */
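/* An example of such a self-referential PHI (illustration only) is
       x_1 = PHI <x_0(D)(entry), x_1(latch)>
   where one argument is the PHI result itself, so even a single-member
   SCC forms a cycle.  */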
3964 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3965 /* Fallthru to iteration. */ ;
3966 else
3968 visit_use (use);
3969 return;
3973 if (dump_file && (dump_flags & TDF_DETAILS))
3974 print_scc (dump_file, scc);
3976 /* Iterate over the SCC with the optimistic table until it stops
3977 changing. */
3978 current_info = optimistic_info;
3979 while (changed)
3981 changed = false;
3982 iterations++;
3983 if (dump_file && (dump_flags & TDF_DETAILS))
3984 fprintf (dump_file, "Starting iteration %d\n", iterations);
3985 /* As we are value-numbering optimistically we have to
3986 clear the expression tables and the simplified expressions
3987 in each iteration until we converge. */
3988 optimistic_info->nary->empty ();
3989 optimistic_info->phis->empty ();
3990 optimistic_info->references->empty ();
3991 obstack_free (&optimistic_info->nary_obstack, NULL);
3992 gcc_obstack_init (&optimistic_info->nary_obstack);
3993 optimistic_info->phis_pool->release ();
3994 optimistic_info->references_pool->release ();
3995 FOR_EACH_VEC_ELT (scc, i, var)
3996 gcc_assert (!VN_INFO (var)->needs_insertion
3997 && VN_INFO (var)->expr == NULL);
3998 FOR_EACH_VEC_ELT (scc, i, var)
3999 changed |= visit_use (var);
4002 if (dump_file && (dump_flags & TDF_DETAILS))
4003 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4004 statistics_histogram_event (cfun, "SCC iterations", iterations);
4006 /* Finally, copy the contents of the no longer used optimistic
4007 table to the valid table. */
4008 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4009 copy_nary (nary, valid_info);
4010 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4011 copy_phi (phi, valid_info);
4012 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4013 ref, vn_reference_t, hir)
4014 copy_reference (ref, valid_info);
4016 current_info = valid_info;
4020 /* Pop the components of the found SCC for NAME off the SCC stack
4021 and process them. Returns true if all went well, false if
4022 we ran into resource limits. */
4024 static bool
4025 extract_and_process_scc_for_name (tree name)
4027 auto_vec<tree> scc;
4028 tree x;
4030 /* Found an SCC, pop the components off the SCC stack and
4031 process them. */
4034 x = sccstack.pop ();
4036 VN_INFO (x)->on_sccstack = false;
4037 scc.safe_push (x);
4038 } while (x != name);
4040 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4041 if (scc.length ()
4042 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4044 if (dump_file)
4045 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4046 "SCC size %u exceeding %u\n", scc.length (),
4047 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4049 return false;
4052 if (scc.length () > 1)
4053 sort_scc (scc);
4055 process_scc (scc);
4057 return true;
4060 /* Depth-first search on NAME to discover and process SCCs in the SSA
4061 graph.
4062 Execution of this algorithm relies on the fact that the SCCs are
4063 popped off the stack in topological order.
4064 Returns true if successful, false if we stopped processing SCCs due
4065 to resource constraints. */
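/* This is essentially Tarjan's SCC algorithm, implemented iteratively:
   the explicit namevec/itervec stacks replace recursion, and dfsnum,
   low and on_sccstack are kept in the VN_INFO of each SSA name.  */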
4067 static bool
4068 DFS (tree name)
4070 vec<ssa_op_iter> itervec = vNULL;
4071 vec<tree> namevec = vNULL;
4072 use_operand_p usep = NULL;
4073 gimple *defstmt;
4074 tree use;
4075 ssa_op_iter iter;
4077 start_over:
4078 /* SCC info */
4079 VN_INFO (name)->dfsnum = next_dfs_num++;
4080 VN_INFO (name)->visited = true;
4081 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4083 sccstack.safe_push (name);
4084 VN_INFO (name)->on_sccstack = true;
4085 defstmt = SSA_NAME_DEF_STMT (name);
4087 /* Recursively DFS on our operands, looking for SCC's. */
4088 if (!gimple_nop_p (defstmt))
4090 /* Push a new iterator. */
4091 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4092 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4093 else
4094 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4096 else
4097 clear_and_done_ssa_iter (&iter);
4099 while (1)
4101 /* If we are done processing uses of a name, go up the stack
4102 of iterators and process SCCs as we found them. */
4103 if (op_iter_done (&iter))
4105 /* See if we found an SCC. */
4106 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4107 if (!extract_and_process_scc_for_name (name))
4109 namevec.release ();
4110 itervec.release ();
4111 return false;
4114 /* Check if we are done. */
4115 if (namevec.is_empty ())
4117 namevec.release ();
4118 itervec.release ();
4119 return true;
4122 /* Restore the last use walker and continue walking there. */
4123 use = name;
4124 name = namevec.pop ();
4125 memcpy (&iter, &itervec.last (),
4126 sizeof (ssa_op_iter));
4127 itervec.pop ();
4128 goto continue_walking;
4131 use = USE_FROM_PTR (usep);
4133 /* Since we handle phi nodes, we will sometimes get
4134 invariants in the use expression. */
4135 if (TREE_CODE (use) == SSA_NAME)
4137 if (! (VN_INFO (use)->visited))
4139 /* Recurse by pushing the current use walking state on
4140 the stack and starting over. */
4141 itervec.safe_push (iter);
4142 namevec.safe_push (name);
4143 name = use;
4144 goto start_over;
4146 continue_walking:
4147 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4148 VN_INFO (use)->low);
4150 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4151 && VN_INFO (use)->on_sccstack)
4153 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4154 VN_INFO (name)->low);
4158 usep = op_iter_next_use (&iter);
4162 /* Allocate a value number table. */
4164 static void
4165 allocate_vn_table (vn_tables_t table)
4167 table->phis = new vn_phi_table_type (23);
4168 table->nary = new vn_nary_op_table_type (23);
4169 table->references = new vn_reference_table_type (23);
4171 gcc_obstack_init (&table->nary_obstack);
4172 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4173 table->references_pool = new object_allocator<vn_reference_s>
4174 ("VN references");
4177 /* Free a value number table. */
4179 static void
4180 free_vn_table (vn_tables_t table)
4182 delete table->phis;
4183 table->phis = NULL;
4184 delete table->nary;
4185 table->nary = NULL;
4186 delete table->references;
4187 table->references = NULL;
4188 obstack_free (&table->nary_obstack, NULL);
4189 delete table->phis_pool;
4190 delete table->references_pool;
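/* Set up the data structures used by SCC value numbering: dominance
   info and RPO numbers, the DFS/SCC stacks, the valid and optimistic
   hash tables, and the per-SSA-name VN_INFO entries.  */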
4193 static void
4194 init_scc_vn (void)
4196 size_t i;
4197 int j;
4198 int *rpo_numbers_temp;
4200 calculate_dominance_info (CDI_DOMINATORS);
4201 mark_dfs_back_edges ();
4203 sccstack.create (0);
4204 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4206 constant_value_ids = BITMAP_ALLOC (NULL);
4208 next_dfs_num = 1;
4209 next_value_id = 1;
4211 vn_ssa_aux_table.create (num_ssa_names + 1);
4212 /* VEC_alloc doesn't actually grow it to the right size; it just
4213 preallocates the space to do so. */
4214 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4215 gcc_obstack_init (&vn_ssa_aux_obstack);
4217 shared_lookup_phiargs.create (0);
4218 shared_lookup_references.create (0);
4219 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4220 rpo_numbers_temp =
4221 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4222 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4224 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
4225 bb is the i'th block in RPO order. We want to map block indices
4226 to RPO numbers, so we need to invert this array. */
4227 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4228 rpo_numbers[rpo_numbers_temp[j]] = j;
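/* For instance (illustrative values), if rpo_numbers_temp were
   { 2, 4, 3 } this loop would set rpo_numbers[2] = 0,
   rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */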
4230 XDELETE (rpo_numbers_temp);
4232 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4234 renumber_gimple_stmt_uids ();
4236 /* Create the valid and optimistic value numbering tables. */
4237 valid_info = XCNEW (struct vn_tables_s);
4238 allocate_vn_table (valid_info);
4239 optimistic_info = XCNEW (struct vn_tables_s);
4240 allocate_vn_table (optimistic_info);
4241 current_info = valid_info;
4243 /* Create the VN_INFO structures, and initialize value numbers to
4244 TOP or VARYING for parameters. */
4245 for (i = 1; i < num_ssa_names; i++)
4247 tree name = ssa_name (i);
4248 if (!name)
4249 continue;
4251 VN_INFO_GET (name)->valnum = VN_TOP;
4252 VN_INFO (name)->needs_insertion = false;
4253 VN_INFO (name)->expr = NULL;
4254 VN_INFO (name)->value_id = 0;
4256 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4257 continue;
4259 switch (TREE_CODE (SSA_NAME_VAR (name)))
4261 case VAR_DECL:
4262 /* Undefined vars keep TOP. */
4263 break;
4265 case PARM_DECL:
4266 /* Parameters are VARYING but we can record a condition
4267 if we know it is a non-NULL pointer. */
4268 VN_INFO (name)->visited = true;
4269 VN_INFO (name)->valnum = name;
4270 if (POINTER_TYPE_P (TREE_TYPE (name))
4271 && nonnull_arg_p (SSA_NAME_VAR (name)))
4273 tree ops[2];
4274 ops[0] = name;
4275 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4276 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4277 boolean_true_node, 0);
4278 if (dump_file && (dump_flags & TDF_DETAILS))
4280 fprintf (dump_file, "Recording ");
4281 print_generic_expr (dump_file, name, TDF_SLIM);
4282 fprintf (dump_file, " != 0\n");
4285 break;
4287 case RESULT_DECL:
4288 /* If the result is passed by invisible reference, the default
4289 def is initialized; otherwise it's uninitialized. */
4290 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4292 VN_INFO (name)->visited = true;
4293 VN_INFO (name)->valnum = name;
4295 break;
4297 default:
4298 gcc_unreachable ();
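/* Tear down the data structures allocated by init_scc_vn, releasing
   SSA names that were created only for insertion and moving any
   points-to or range information recorded in the VN_INFO back onto
   the SSA names.  */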
4303 void
4304 free_scc_vn (void)
4306 size_t i;
4308 delete constant_to_value_id;
4309 constant_to_value_id = NULL;
4310 BITMAP_FREE (constant_value_ids);
4311 shared_lookup_phiargs.release ();
4312 shared_lookup_references.release ();
4313 XDELETEVEC (rpo_numbers);
4315 for (i = 0; i < num_ssa_names; i++)
4317 tree name = ssa_name (i);
4318 if (name
4319 && has_VN_INFO (name))
4321 if (VN_INFO (name)->needs_insertion)
4322 release_ssa_name (name);
4323 else if (POINTER_TYPE_P (TREE_TYPE (name))
4324 && VN_INFO (name)->info.ptr_info)
4325 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4326 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4327 && VN_INFO (name)->info.range_info)
4329 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4330 SSA_NAME_ANTI_RANGE_P (name)
4331 = VN_INFO (name)->range_info_anti_range_p;
4335 obstack_free (&vn_ssa_aux_obstack, NULL);
4336 vn_ssa_aux_table.release ();
4338 sccstack.release ();
4339 free_vn_table (valid_info);
4340 XDELETE (valid_info);
4341 free_vn_table (optimistic_info);
4342 XDELETE (optimistic_info);
4344 BITMAP_FREE (const_parms);
4347 /* Set *ID according to RESULT. */
4349 static void
4350 set_value_id_for_result (tree result, unsigned int *id)
4352 if (result && TREE_CODE (result) == SSA_NAME)
4353 *id = VN_INFO (result)->value_id;
4354 else if (result && is_gimple_min_invariant (result))
4355 *id = get_or_alloc_constant_value_id (result);
4356 else
4357 *id = get_next_value_id ();
4360 /* Set the value ids in the valid hash tables. */
4362 static void
4363 set_hashtable_value_ids (void)
4365 vn_nary_op_iterator_type hin;
4366 vn_phi_iterator_type hip;
4367 vn_reference_iterator_type hir;
4368 vn_nary_op_t vno;
4369 vn_reference_t vr;
4370 vn_phi_t vp;
4372 /* Now set the value ids of the things we had put in the hash
4373 table. */
4375 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4376 set_value_id_for_result (vno->result, &vno->value_id);
4378 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4379 set_value_id_for_result (vp->result, &vp->value_id);
4381 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4382 hir)
4383 set_value_id_for_result (vr->result, &vr->value_id);
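/* Dominator walker driving value numbering: it runs the DFS/SCC
   machinery on every SSA def in a block, temporarily records
   conditions implied by a single executable predecessor edge, and
   returns the taken edge when the block's controlling statement can
   be resolved.  */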
4386 class sccvn_dom_walker : public dom_walker
4388 public:
4389 sccvn_dom_walker ()
4390 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
4391 ~sccvn_dom_walker ();
4393 virtual edge before_dom_children (basic_block);
4394 virtual void after_dom_children (basic_block);
4396 void record_cond (basic_block,
4397 enum tree_code code, tree lhs, tree rhs, bool value);
4398 void record_conds (basic_block,
4399 enum tree_code code, tree lhs, tree rhs, bool value);
4401 bool fail;
4402 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4403 cond_stack;
4406 sccvn_dom_walker::~sccvn_dom_walker ()
4408 cond_stack.release ();
4411 /* Record a temporary condition for the BB and its dominated blocks. */
4413 void
4414 sccvn_dom_walker::record_cond (basic_block bb,
4415 enum tree_code code, tree lhs, tree rhs,
4416 bool value)
4418 tree ops[2] = { lhs, rhs };
4419 vn_nary_op_t old = NULL;
4420 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4421 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4422 vn_nary_op_t cond
4423 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4424 value
4425 ? boolean_true_node
4426 : boolean_false_node, 0);
4427 if (dump_file && (dump_flags & TDF_DETAILS))
4429 fprintf (dump_file, "Recording temporarily ");
4430 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4431 fprintf (dump_file, " %s ", get_tree_code_name (code));
4432 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4433 fprintf (dump_file, " == %s%s\n",
4434 value ? "true" : "false",
4435 old ? " (old entry saved)" : "");
4437 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4440 /* Record temporary conditions for the BB and its dominated blocks
4441 according to LHS CODE RHS == VALUE, together with the conditions it implies. */
4443 void
4444 sccvn_dom_walker::record_conds (basic_block bb,
4445 enum tree_code code, tree lhs, tree rhs,
4446 bool value)
4448 /* Record the original condition. */
4449 record_cond (bb, code, lhs, rhs, value);
4451 if (!value)
4452 return;
4454 /* Record dominated conditions if the condition is true. Note that
4455 the inversion is already recorded. */
4456 switch (code)
4458 case LT_EXPR:
4459 case GT_EXPR:
4460 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4461 record_cond (bb, NE_EXPR, lhs, rhs, true);
4462 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4463 break;
4465 case EQ_EXPR:
4466 record_cond (bb, LE_EXPR, lhs, rhs, true);
4467 record_cond (bb, GE_EXPR, lhs, rhs, true);
4468 record_cond (bb, LT_EXPR, lhs, rhs, false);
4469 record_cond (bb, GT_EXPR, lhs, rhs, false);
4470 break;
4472 default:
4473 break;
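/* For example, recording A < B == true above also records
   A <= B == true, A != B == true and A == B == false for the
   dominated blocks (the LT_EXPR case of the switch).  */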
4477 /* Restore expressions and values derived from conditionals. */
4479 void
4480 sccvn_dom_walker::after_dom_children (basic_block bb)
4482 while (!cond_stack.is_empty ()
4483 && cond_stack.last ().first == bb)
4485 vn_nary_op_t cond = cond_stack.last ().second.first;
4486 vn_nary_op_t old = cond_stack.last ().second.second;
4487 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4488 if (old)
4489 vn_nary_op_insert_into (old, current_info->nary, false);
4490 cond_stack.pop ();
4494 /* Value number all statements in BB. */
4496 edge
4497 sccvn_dom_walker::before_dom_children (basic_block bb)
4499 edge e;
4500 edge_iterator ei;
4502 if (fail)
4503 return NULL;
4505 if (dump_file && (dump_flags & TDF_DETAILS))
4506 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4508 /* If we have a single predecessor, record the equivalence from a
4509 possible condition on the predecessor edge. */
4510 edge pred_e = NULL;
4511 FOR_EACH_EDGE (e, ei, bb->preds)
4513 /* Ignore simple backedges from this block to allow recording
4514 conditions in loop headers. */
4515 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4516 continue;
4517 if (! pred_e)
4518 pred_e = e;
4519 else
4521 pred_e = NULL;
4522 break;
4525 if (pred_e)
4527 /* Check if there are multiple executable successor edges in
4528 the source block. Otherwise there is no additional info
4529 to be recorded. */
4530 edge e2;
4531 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4532 if (e2 != pred_e
4533 && e2->flags & EDGE_EXECUTABLE)
4534 break;
4535 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4537 gimple *stmt = last_stmt (pred_e->src);
4538 if (stmt
4539 && gimple_code (stmt) == GIMPLE_COND)
4541 enum tree_code code = gimple_cond_code (stmt);
4542 tree lhs = gimple_cond_lhs (stmt);
4543 tree rhs = gimple_cond_rhs (stmt);
4544 record_conds (bb, code, lhs, rhs,
4545 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4546 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4547 if (code != ERROR_MARK)
4548 record_conds (bb, code, lhs, rhs,
4549 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4554 /* Value-number all defs in the basic-block. */
4555 for (gphi_iterator gsi = gsi_start_phis (bb);
4556 !gsi_end_p (gsi); gsi_next (&gsi))
4558 gphi *phi = gsi.phi ();
4559 tree res = PHI_RESULT (phi);
4560 if (!VN_INFO (res)->visited
4561 && !DFS (res))
4563 fail = true;
4564 return NULL;
4567 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4568 !gsi_end_p (gsi); gsi_next (&gsi))
4570 ssa_op_iter i;
4571 tree op;
4572 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4573 if (!VN_INFO (op)->visited
4574 && !DFS (op))
4576 fail = true;
4577 return NULL;
4581 /* Finally look at the last stmt. */
4582 gimple *stmt = last_stmt (bb);
4583 if (!stmt)
4584 return NULL;
4586 enum gimple_code code = gimple_code (stmt);
4587 if (code != GIMPLE_COND
4588 && code != GIMPLE_SWITCH
4589 && code != GIMPLE_GOTO)
4590 return NULL;
4592 if (dump_file && (dump_flags & TDF_DETAILS))
4594 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4595 print_gimple_stmt (dump_file, stmt, 0, 0);
4598 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4599 if value-numbering can prove they are not reachable. Handling
4600 computed gotos is also possible. */
4601 tree val;
4602 switch (code)
4604 case GIMPLE_COND:
4606 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4607 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4608 val = gimple_simplify (gimple_cond_code (stmt),
4609 boolean_type_node, lhs, rhs,
4610 NULL, vn_valueize);
4611 /* If that didn't simplify to a constant see if we have recorded
4612 temporary expressions from taken edges. */
4613 if (!val || TREE_CODE (val) != INTEGER_CST)
4615 tree ops[2];
4616 ops[0] = lhs;
4617 ops[1] = rhs;
4618 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4619 boolean_type_node, ops, NULL);
4621 break;
4623 case GIMPLE_SWITCH:
4624 val = gimple_switch_index (as_a <gswitch *> (stmt));
4625 break;
4626 case GIMPLE_GOTO:
4627 val = gimple_goto_dest (stmt);
4628 break;
4629 default:
4630 gcc_unreachable ();
4632 if (!val)
4633 return NULL;
4635 edge taken = find_taken_edge (bb, vn_valueize (val));
4636 if (!taken)
4637 return NULL;
4639 if (dump_file && (dump_flags & TDF_DETAILS))
4640 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4641 "not executable\n", bb->index, bb->index, taken->dest->index);
4643 return taken;
4646 /* Do SCCVN. Returns true if it finished, false if we bailed out
4647 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4648 how the alias-oracle walk is used during the VN process. */
4650 bool
4651 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4653 size_t i;
4655 default_vn_walk_kind = default_vn_walk_kind_;
4657 init_scc_vn ();
4659 /* Collect pointers we know point to readonly memory. */
4660 const_parms = BITMAP_ALLOC (NULL);
4661 tree fnspec = lookup_attribute ("fn spec",
4662 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
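/* In a "fn spec" attribute string the first character describes the
   return value and the following characters describe the parameters
   in order; as used here, 'R' or 'r' for a parameter is taken to mean
   the memory it points to is only read, so its default definition is
   added to const_parms.  */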
4663 if (fnspec)
4665 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4666 i = 1;
4667 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4668 arg; arg = DECL_CHAIN (arg), ++i)
4670 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4671 break;
4672 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4673 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4675 tree name = ssa_default_def (cfun, arg);
4676 if (name)
4677 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4682 /* Walk all blocks in dominator order, value-numbering the SSA defs
4683 of stmts and deciding whether outgoing edges are not executable. */
4684 sccvn_dom_walker walker;
4685 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4686 if (walker.fail)
4688 free_scc_vn ();
4689 return false;
4692 /* Initialize the value ids and prune out remaining VN_TOPs
4693 from dead code. */
4694 for (i = 1; i < num_ssa_names; ++i)
4696 tree name = ssa_name (i);
4697 vn_ssa_aux_t info;
4698 if (!name)
4699 continue;
4700 info = VN_INFO (name);
4701 if (!info->visited)
4702 info->valnum = name;
4703 if (info->valnum == name
4704 || info->valnum == VN_TOP)
4705 info->value_id = get_next_value_id ();
4706 else if (is_gimple_min_invariant (info->valnum))
4707 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4710 /* Propagate value ids: a name valueized to another SSA name inherits its value id. */
4711 for (i = 1; i < num_ssa_names; ++i)
4713 tree name = ssa_name (i);
4714 vn_ssa_aux_t info;
4715 if (!name)
4716 continue;
4717 info = VN_INFO (name);
4718 if (TREE_CODE (info->valnum) == SSA_NAME
4719 && info->valnum != name
4720 && info->value_id != VN_INFO (info->valnum)->value_id)
4721 info->value_id = VN_INFO (info->valnum)->value_id;
4724 set_hashtable_value_ids ();
4726 if (dump_file && (dump_flags & TDF_DETAILS))
4728 fprintf (dump_file, "Value numbers:\n");
4729 for (i = 0; i < num_ssa_names; i++)
4731 tree name = ssa_name (i);
4732 if (name
4733 && VN_INFO (name)->visited
4734 && SSA_VAL (name) != name)
4736 print_generic_expr (dump_file, name, 0);
4737 fprintf (dump_file, " = ");
4738 print_generic_expr (dump_file, SSA_VAL (name), 0);
4739 fprintf (dump_file, "\n");
4744 return true;
4747 /* Return the maximum value id we have ever seen. */
4749 unsigned int
4750 get_max_value_id (void)
4752 return next_value_id;
4755 /* Return the next unique value id. */
4757 unsigned int
4758 get_next_value_id (void)
4760 return next_value_id++;
4764 /* Compare two expressions E1 and E2 and return true if they are equal. */
4766 bool
4767 expressions_equal_p (tree e1, tree e2)
4769 /* The obvious case. */
4770 if (e1 == e2)
4771 return true;
4773 /* If either one is VN_TOP consider them equal. */
4774 if (e1 == VN_TOP || e2 == VN_TOP)
4775 return true;
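/* VN_TOP is the optimistic "undefined" value; treating it as equal
   to anything lets an expression involving a not-yet-visited or
   undefined name take the value of the other operand.  */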
4777 /* If only one of them is null, they cannot be equal. */
4778 if (!e1 || !e2)
4779 return false;
4781 /* Now perform the actual comparison. */
4782 if (TREE_CODE (e1) == TREE_CODE (e2)
4783 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4784 return true;
4786 return false;
4790 /* Return true if the nary operation NARY may trap. This is a copy
4791 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4793 bool
4794 vn_nary_may_trap (vn_nary_op_t nary)
4796 tree type;
4797 tree rhs2 = NULL_TREE;
4798 bool honor_nans = false;
4799 bool honor_snans = false;
4800 bool fp_operation = false;
4801 bool honor_trapv = false;
4802 bool handled, ret;
4803 unsigned i;
4805 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4806 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4807 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4809 type = nary->type;
4810 fp_operation = FLOAT_TYPE_P (type);
4811 if (fp_operation)
4813 honor_nans = flag_trapping_math && !flag_finite_math_only;
4814 honor_snans = flag_signaling_nans != 0;
4816 else if (INTEGRAL_TYPE_P (type)
4817 && TYPE_OVERFLOW_TRAPS (type))
4818 honor_trapv = true;
4820 if (nary->length >= 2)
4821 rhs2 = nary->op[1];
4822 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4823 honor_trapv,
4824 honor_nans, honor_snans, rhs2,
4825 &handled);
4826 if (handled
4827 && ret)
4828 return true;
4830 for (i = 0; i < nary->length; ++i)
4831 if (tree_could_trap_p (nary->op[i]))
4832 return true;
4834 return false;