[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight-line code, it is equivalent to a regular hash-based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
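   As a purely illustrative sketch of the optimistic iteration, consider

     i_1 = PHI <0(entry), i_3(latch)>
     j_2 = PHI <0(entry), j_4(latch)>
     i_3 = i_1 + 1;
     j_4 = j_2 + 1;

   where all four names form one SCC.  Starting the members at VN_TOP,
   the first iteration gives i_1 and j_2 the same value number (VN_TOP
   arguments are ignored when comparing PHIs), which in turn unifies
   i_3 with j_4; iterating until nothing changes proves i and j
   equivalent, which a single non-iterating pass over the cycle could
   not conclude. */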
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
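/* Compare the reference entries V and C for equality. */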
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
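/* Release the operands of reference entry V when it is removed from
   the hash table. */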
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
335 do
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
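/* For example (purely illustrative): an assignment like x_1 = y_2 + 1 is
   VN_NARY, a load x_1 = *p_2 or a call is VN_REFERENCE, a PHI node is
   VN_PHI, and x_1 = 7 is VN_CONSTANT. */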
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist, return 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
630 do
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
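/* As a rough sketch (field details elided): for a load from a.f[i] the
   ops vector receives an ARRAY_REF entry (op0 = i, op1 = lower bound,
   op2 = element size), a COMPONENT_REF entry (op0 = f) and, because
   decls are canonicalized to MEM[&decl], a MEM_REF entry with offset 0
   followed by an ADDR_EXPR entry for &a. */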
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field;
776 a matching type is not necessary, and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
792 if (wi::fits_shwi_p (off)
793 /* Prohibit value-numbering zero offset components
794 of addresses the same before the pass folding
795 __builtin_object_size had a chance to run
796 (checking cfun->after_inlining does the
797 trick here). */
798 && (TREE_CODE (orig) != ADDR_EXPR
799 || off != 0
800 || cfun->after_inlining))
801 temp.off = off.to_shwi ();
805 break;
806 case ARRAY_RANGE_REF:
807 case ARRAY_REF:
809 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
810 /* Record index as operand. */
811 temp.op0 = TREE_OPERAND (ref, 1);
812 /* Always record lower bounds and element size. */
813 temp.op1 = array_ref_low_bound (ref);
814 /* But record element size in units of the type alignment. */
815 temp.op2 = TREE_OPERAND (ref, 3);
816 temp.align = eltype->type_common.align;
817 if (! temp.op2)
818 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
819 size_int (TYPE_ALIGN_UNIT (eltype)));
820 if (TREE_CODE (temp.op0) == INTEGER_CST
821 && TREE_CODE (temp.op1) == INTEGER_CST
822 && TREE_CODE (temp.op2) == INTEGER_CST)
824 offset_int off = ((wi::to_offset (temp.op0)
825 - wi::to_offset (temp.op1))
826 * wi::to_offset (temp.op2)
827 * vn_ref_op_align_unit (&temp));
828 if (wi::fits_shwi_p (off))
829 temp.off = off.to_shwi();
832 break;
833 case VAR_DECL:
834 if (DECL_HARD_REGISTER (ref))
836 temp.op0 = ref;
837 break;
839 /* Fallthru. */
840 case PARM_DECL:
841 case CONST_DECL:
842 case RESULT_DECL:
843 /* Canonicalize decls to MEM[&decl] which is what we end up with
844 when valueizing MEM[ptr] with ptr = &decl. */
845 temp.opcode = MEM_REF;
846 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
847 temp.off = 0;
848 result->safe_push (temp);
849 temp.opcode = ADDR_EXPR;
850 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
851 temp.type = TREE_TYPE (temp.op0);
852 temp.off = -1;
853 break;
854 case STRING_CST:
855 case INTEGER_CST:
856 case COMPLEX_CST:
857 case VECTOR_CST:
858 case REAL_CST:
859 case FIXED_CST:
860 case CONSTRUCTOR:
861 case SSA_NAME:
862 temp.op0 = ref;
863 break;
864 case ADDR_EXPR:
865 if (is_gimple_min_invariant (ref))
867 temp.op0 = ref;
868 break;
870 break;
871 /* These are only interesting for their operands, their
872 existence, and their type. They will never be the last
873 ref in the chain of references (i.e. they require an
874 operand), so we don't have to put anything
875 for op* as it will be handled by the iteration */
876 case REALPART_EXPR:
877 temp.off = 0;
878 break;
879 case VIEW_CONVERT_EXPR:
880 temp.off = 0;
881 temp.reverse = storage_order_barrier_p (ref);
882 break;
883 case IMAGPART_EXPR:
884 /* This is only interesting for its constant offset. */
885 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
886 break;
887 default:
888 gcc_unreachable ();
890 result->safe_push (temp);
892 if (REFERENCE_CLASS_P (ref)
893 || TREE_CODE (ref) == MODIFY_EXPR
894 || TREE_CODE (ref) == WITH_SIZE_EXPR
895 || (TREE_CODE (ref) == ADDR_EXPR
896 && !is_gimple_min_invariant (ref)))
897 ref = TREE_OPERAND (ref, 0);
898 else
899 ref = NULL_TREE;
903 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
904 operands in *OPS, the reference alias set SET and the reference type TYPE.
905 Return true if something useful was produced. */
907 bool
908 ao_ref_init_from_vn_reference (ao_ref *ref,
909 alias_set_type set, tree type,
910 vec<vn_reference_op_s> ops)
912 vn_reference_op_t op;
913 unsigned i;
914 tree base = NULL_TREE;
915 tree *op0_p = &base;
916 offset_int offset = 0;
917 offset_int max_size;
918 offset_int size = -1;
919 tree size_tree = NULL_TREE;
920 alias_set_type base_alias_set = -1;
922 /* First get the final access size from just the outermost expression. */
923 op = &ops[0];
924 if (op->opcode == COMPONENT_REF)
925 size_tree = DECL_SIZE (op->op0);
926 else if (op->opcode == BIT_FIELD_REF)
927 size_tree = op->op0;
928 else
930 machine_mode mode = TYPE_MODE (type);
931 if (mode == BLKmode)
932 size_tree = TYPE_SIZE (type);
933 else
934 size = int (GET_MODE_BITSIZE (mode));
936 if (size_tree != NULL_TREE
937 && TREE_CODE (size_tree) == INTEGER_CST)
938 size = wi::to_offset (size_tree);
940 /* Initially, maxsize is the same as the accessed element size.
941 In the following it will only grow (or become -1). */
942 max_size = size;
944 /* Compute cumulative bit-offset for nested component-refs and array-refs,
945 and find the ultimate containing object. */
946 FOR_EACH_VEC_ELT (ops, i, op)
948 switch (op->opcode)
950 /* These may be in the reference ops, but we cannot do anything
951 sensible with them here. */
952 case ADDR_EXPR:
953 /* Apart from ADDR_EXPR arguments to MEM_REF. */
954 if (base != NULL_TREE
955 && TREE_CODE (base) == MEM_REF
956 && op->op0
957 && DECL_P (TREE_OPERAND (op->op0, 0)))
959 vn_reference_op_t pop = &ops[i-1];
960 base = TREE_OPERAND (op->op0, 0);
961 if (pop->off == -1)
963 max_size = -1;
964 offset = 0;
966 else
967 offset += pop->off * BITS_PER_UNIT;
968 op0_p = NULL;
969 break;
971 /* Fallthru. */
972 case CALL_EXPR:
973 return false;
975 /* Record the base objects. */
976 case MEM_REF:
977 base_alias_set = get_deref_alias_set (op->op0);
978 *op0_p = build2 (MEM_REF, op->type,
979 NULL_TREE, op->op0);
980 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
981 MR_DEPENDENCE_BASE (*op0_p) = op->base;
982 op0_p = &TREE_OPERAND (*op0_p, 0);
983 break;
985 case VAR_DECL:
986 case PARM_DECL:
987 case RESULT_DECL:
988 case SSA_NAME:
989 *op0_p = op->op0;
990 op0_p = NULL;
991 break;
993 /* And now the usual component-reference style ops. */
994 case BIT_FIELD_REF:
995 offset += wi::to_offset (op->op1);
996 break;
998 case COMPONENT_REF:
1000 tree field = op->op0;
1001 /* We do not have a complete COMPONENT_REF tree here so we
1002 cannot use component_ref_field_offset. Do the interesting
1003 parts manually. */
1004 tree this_offset = DECL_FIELD_OFFSET (field);
1006 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1007 max_size = -1;
1008 else
1010 offset_int woffset = (wi::to_offset (this_offset)
1011 << LOG2_BITS_PER_UNIT);
1012 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1013 offset += woffset;
1015 break;
1018 case ARRAY_RANGE_REF:
1019 case ARRAY_REF:
1020 /* We recorded the lower bound and the element size. */
1021 if (TREE_CODE (op->op0) != INTEGER_CST
1022 || TREE_CODE (op->op1) != INTEGER_CST
1023 || TREE_CODE (op->op2) != INTEGER_CST)
1024 max_size = -1;
1025 else
1027 offset_int woffset
1028 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1029 TYPE_PRECISION (TREE_TYPE (op->op0)));
1030 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1031 woffset <<= LOG2_BITS_PER_UNIT;
1032 offset += woffset;
1034 break;
1036 case REALPART_EXPR:
1037 break;
1039 case IMAGPART_EXPR:
1040 offset += size;
1041 break;
1043 case VIEW_CONVERT_EXPR:
1044 break;
1046 case STRING_CST:
1047 case INTEGER_CST:
1048 case COMPLEX_CST:
1049 case VECTOR_CST:
1050 case REAL_CST:
1051 case CONSTRUCTOR:
1052 case CONST_DECL:
1053 return false;
1055 default:
1056 return false;
1060 if (base == NULL_TREE)
1061 return false;
1063 ref->ref = NULL_TREE;
1064 ref->base = base;
1065 ref->ref_alias_set = set;
1066 if (base_alias_set != -1)
1067 ref->base_alias_set = base_alias_set;
1068 else
1069 ref->base_alias_set = get_alias_set (base);
1070 /* We discount volatiles from value-numbering elsewhere. */
1071 ref->volatile_p = false;
1073 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1075 ref->offset = 0;
1076 ref->size = -1;
1077 ref->max_size = -1;
1078 return true;
1081 ref->size = size.to_shwi ();
1083 if (!wi::fits_shwi_p (offset))
1085 ref->offset = 0;
1086 ref->max_size = -1;
1087 return true;
1090 ref->offset = offset.to_shwi ();
1092 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1093 ref->max_size = -1;
1094 else
1095 ref->max_size = max_size.to_shwi ();
1097 return true;
1100 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1101 vn_reference_op_s's. */
1103 static void
1104 copy_reference_ops_from_call (gcall *call,
1105 vec<vn_reference_op_s> *result)
1107 vn_reference_op_s temp;
1108 unsigned i;
1109 tree lhs = gimple_call_lhs (call);
1110 int lr;
1112 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1113 different. By adding the lhs here in the vector, we ensure that the
1114 hashcode is different, guaranteeing a different value number. */
1115 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1117 memset (&temp, 0, sizeof (temp));
1118 temp.opcode = MODIFY_EXPR;
1119 temp.type = TREE_TYPE (lhs);
1120 temp.op0 = lhs;
1121 temp.off = -1;
1122 result->safe_push (temp);
1125 /* Copy the type, opcode, function, static chain and EH region, if any. */
1126 memset (&temp, 0, sizeof (temp));
1127 temp.type = gimple_call_return_type (call);
1128 temp.opcode = CALL_EXPR;
1129 temp.op0 = gimple_call_fn (call);
1130 temp.op1 = gimple_call_chain (call);
1131 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1132 temp.op2 = size_int (lr);
1133 temp.off = -1;
1134 if (gimple_call_with_bounds_p (call))
1135 temp.with_bounds = 1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
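/* Illustrative sketch: given ops that describe MEM[&s.a.b, 0], this
   rewrites the ADDR_EXPR operand to &s and folds the constant byte
   offset of .a.b into the MEM_REF operand, so the ops describe the
   equivalent MEM[&s, <offset of a.b>]. */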
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 HOST_WIDE_INT addr_offset = 0;
1159 /* All we have to do for &OBJ.foo.bar is add the offset of .foo.bar
1160 within OBJ to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1168 off += addr_offset;
1169 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1170 op->op0 = build_fold_addr_expr (addr_base);
1171 if (tree_fits_shwi_p (mem_op->op0))
1172 mem_op->off = tree_to_shwi (mem_op->op0);
1173 else
1174 mem_op->off = -1;
1175 return true;
1177 return false;
1180 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1181 *I_P to point to the last element of the replacement. */
1182 static bool
1183 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1184 unsigned int *i_p)
1186 unsigned int i = *i_p;
1187 vn_reference_op_t op = &(*ops)[i];
1188 vn_reference_op_t mem_op = &(*ops)[i - 1];
1189 gimple *def_stmt;
1190 enum tree_code code;
1191 offset_int off;
1193 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1194 if (!is_gimple_assign (def_stmt))
1195 return false;
1197 code = gimple_assign_rhs_code (def_stmt);
1198 if (code != ADDR_EXPR
1199 && code != POINTER_PLUS_EXPR)
1200 return false;
1202 off = offset_int::from (mem_op->op0, SIGNED);
1204 /* All we have to do for &OBJ.foo.bar is add the offset of .foo.bar
1205 within OBJ to the preceding MEM_REF offset and replace the
1206 address with &OBJ. */
1207 if (code == ADDR_EXPR)
1209 tree addr, addr_base;
1210 HOST_WIDE_INT addr_offset;
1212 addr = gimple_assign_rhs1 (def_stmt);
1213 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1214 &addr_offset);
1215 /* If that didn't work because the address isn't invariant, propagate
1216 the reference tree from the address operation in case the current
1217 dereference isn't offsetted. */
1218 if (!addr_base
1219 && *i_p == ops->length () - 1
1220 && off == 0
1221 /* This makes us disable this transform for PRE, where the
1222 reference ops might also be used for code insertion, which
1223 is invalid. */
1224 && default_vn_walk_kind == VN_WALKREWRITE)
1226 auto_vec<vn_reference_op_s, 32> tem;
1227 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1228 /* Make sure to preserve TBAA info. The only objects not
1229 wrapped in MEM_REFs that can have their address taken are
1230 STRING_CSTs. */
1231 if (tem.length () >= 2
1232 && tem[tem.length () - 2].opcode == MEM_REF)
1234 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1235 new_mem_op->op0 = fold_convert (TREE_TYPE (mem_op->op0),
1236 new_mem_op->op0);
1238 else
1239 gcc_assert (tem.last ().opcode == STRING_CST);
1240 ops->pop ();
1241 ops->pop ();
1242 ops->safe_splice (tem);
1243 --*i_p;
1244 return true;
1246 if (!addr_base
1247 || TREE_CODE (addr_base) != MEM_REF)
1248 return false;
1250 off += addr_offset;
1251 off += mem_ref_offset (addr_base);
1252 op->op0 = TREE_OPERAND (addr_base, 0);
1254 else
1256 tree ptr, ptroff;
1257 ptr = gimple_assign_rhs1 (def_stmt);
1258 ptroff = gimple_assign_rhs2 (def_stmt);
1259 if (TREE_CODE (ptr) != SSA_NAME
1260 || TREE_CODE (ptroff) != INTEGER_CST)
1261 return false;
1263 off += wi::to_offset (ptroff);
1264 op->op0 = ptr;
1267 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1268 if (tree_fits_shwi_p (mem_op->op0))
1269 mem_op->off = tree_to_shwi (mem_op->op0);
1270 else
1271 mem_op->off = -1;
1272 if (TREE_CODE (op->op0) == SSA_NAME)
1273 op->op0 = SSA_VAL (op->op0);
1274 if (TREE_CODE (op->op0) != SSA_NAME)
1275 op->opcode = TREE_CODE (op->op0);
1277 /* And recurse. */
1278 if (TREE_CODE (op->op0) == SSA_NAME)
1279 vn_reference_maybe_forwprop_address (ops, i_p);
1280 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1281 vn_reference_fold_indirect (ops, i_p);
1282 return true;
1285 /* Optimize the reference REF to a constant if possible or return
1286 NULL_TREE if not. */
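/* For instance (illustrative only): a load from a[1] where
   static const int a[2] = { 1, 2 }; is folded via its constructor, and
   a builtin call such as __builtin_ffs (16) with constant arguments is
   folded to its constant result. */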
1288 tree
1289 fully_constant_vn_reference_p (vn_reference_t ref)
1291 vec<vn_reference_op_s> operands = ref->operands;
1292 vn_reference_op_t op;
1294 /* Try to simplify the translated expression if it is
1295 a call to a builtin function with at most two arguments. */
1296 op = &operands[0];
1297 if (op->opcode == CALL_EXPR
1298 && TREE_CODE (op->op0) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1300 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1301 && operands.length () >= 2
1302 && operands.length () <= 3)
1304 vn_reference_op_t arg0, arg1 = NULL;
1305 bool anyconst = false;
1306 arg0 = &operands[1];
1307 if (operands.length () > 2)
1308 arg1 = &operands[2];
1309 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1310 || (arg0->opcode == ADDR_EXPR
1311 && is_gimple_min_invariant (arg0->op0)))
1312 anyconst = true;
1313 if (arg1
1314 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1315 || (arg1->opcode == ADDR_EXPR
1316 && is_gimple_min_invariant (arg1->op0))))
1317 anyconst = true;
1318 if (anyconst)
1320 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1321 arg1 ? 2 : 1,
1322 arg0->op0,
1323 arg1 ? arg1->op0 : NULL);
1324 if (folded
1325 && TREE_CODE (folded) == NOP_EXPR)
1326 folded = TREE_OPERAND (folded, 0);
1327 if (folded
1328 && is_gimple_min_invariant (folded))
1329 return folded;
1333 /* Simplify reads from constants or constant initializers. */
1334 else if (BITS_PER_UNIT == 8
1335 && is_gimple_reg_type (ref->type)
1336 && (!INTEGRAL_TYPE_P (ref->type)
1337 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1339 HOST_WIDE_INT off = 0;
1340 HOST_WIDE_INT size;
1341 if (INTEGRAL_TYPE_P (ref->type))
1342 size = TYPE_PRECISION (ref->type);
1343 else
1344 size = tree_to_shwi (TYPE_SIZE (ref->type));
1345 if (size % BITS_PER_UNIT != 0
1346 || size > MAX_BITSIZE_MODE_ANY_MODE)
1347 return NULL_TREE;
1348 size /= BITS_PER_UNIT;
1349 unsigned i;
1350 for (i = 0; i < operands.length (); ++i)
1352 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1354 ++i;
1355 break;
1357 if (operands[i].off == -1)
1358 return NULL_TREE;
1359 off += operands[i].off;
1360 if (operands[i].opcode == MEM_REF)
1362 ++i;
1363 break;
1366 vn_reference_op_t base = &operands[--i];
1367 tree ctor = error_mark_node;
1368 tree decl = NULL_TREE;
1369 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1370 ctor = base->op0;
1371 else if (base->opcode == MEM_REF
1372 && base[1].opcode == ADDR_EXPR
1373 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1374 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1376 decl = TREE_OPERAND (base[1].op0, 0);
1377 ctor = ctor_for_folding (decl);
1379 if (ctor == NULL_TREE)
1380 return build_zero_cst (ref->type);
1381 else if (ctor != error_mark_node)
1383 if (decl)
1385 tree res = fold_ctor_reference (ref->type, ctor,
1386 off * BITS_PER_UNIT,
1387 size * BITS_PER_UNIT, decl);
1388 if (res)
1390 STRIP_USELESS_TYPE_CONVERSION (res);
1391 if (is_gimple_min_invariant (res))
1392 return res;
1395 else
1397 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1398 int len = native_encode_expr (ctor, buf, size, off);
1399 if (len > 0)
1400 return native_interpret_expr (ref->type, buf, len);
1405 return NULL_TREE;
1408 /* Return true if OPS contain a storage order barrier. */
1410 static bool
1411 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1413 vn_reference_op_t op;
1414 unsigned i;
1416 FOR_EACH_VEC_ELT (ops, i, op)
1417 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1418 return true;
1420 return false;
1423 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1424 structures into their value numbers. This is done in-place, and
1425 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1426 whether any operands were valueized. */
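/* E.g. (a sketch): if an index operand i_2 has SSA_VAL 4, the ARRAY_REF
   entry using it becomes a constant-index access whose off field can
   then be computed; *VALUEIZED_ANYTHING is set whenever such a
   replacement is made. */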
1428 static vec<vn_reference_op_s>
1429 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1431 vn_reference_op_t vro;
1432 unsigned int i;
1434 *valueized_anything = false;
1436 FOR_EACH_VEC_ELT (orig, i, vro)
1438 if (vro->opcode == SSA_NAME
1439 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1441 tree tem = SSA_VAL (vro->op0);
1442 if (tem != vro->op0)
1444 *valueized_anything = true;
1445 vro->op0 = tem;
1447 /* If it transforms from an SSA_NAME to a constant, update
1448 the opcode. */
1449 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1450 vro->opcode = TREE_CODE (vro->op0);
1452 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1454 tree tem = SSA_VAL (vro->op1);
1455 if (tem != vro->op1)
1457 *valueized_anything = true;
1458 vro->op1 = tem;
1461 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1463 tree tem = SSA_VAL (vro->op2);
1464 if (tem != vro->op2)
1466 *valueized_anything = true;
1467 vro->op2 = tem;
1470 /* If it transforms from an SSA_NAME to an address, fold with
1471 a preceding indirect reference. */
1472 if (i > 0
1473 && vro->op0
1474 && TREE_CODE (vro->op0) == ADDR_EXPR
1475 && orig[i - 1].opcode == MEM_REF)
1477 if (vn_reference_fold_indirect (&orig, &i))
1478 *valueized_anything = true;
1480 else if (i > 0
1481 && vro->opcode == SSA_NAME
1482 && orig[i - 1].opcode == MEM_REF)
1484 if (vn_reference_maybe_forwprop_address (&orig, &i))
1485 *valueized_anything = true;
1487 /* If it transforms a non-constant ARRAY_REF into a constant
1488 one, adjust the constant offset. */
1489 else if (vro->opcode == ARRAY_REF
1490 && vro->off == -1
1491 && TREE_CODE (vro->op0) == INTEGER_CST
1492 && TREE_CODE (vro->op1) == INTEGER_CST
1493 && TREE_CODE (vro->op2) == INTEGER_CST)
1495 offset_int off = ((wi::to_offset (vro->op0)
1496 - wi::to_offset (vro->op1))
1497 * wi::to_offset (vro->op2)
1498 * vn_ref_op_align_unit (vro));
1499 if (wi::fits_shwi_p (off))
1500 vro->off = off.to_shwi ();
1504 return orig;
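/* Valueize the ops in ORIG, discarding the information about whether
   anything was changed. */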
1507 static vec<vn_reference_op_s>
1508 valueize_refs (vec<vn_reference_op_s> orig)
1510 bool tem;
1511 return valueize_refs_1 (orig, &tem);
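/* Shared vector of vn_reference_op_s, truncated and refilled by the
   lookup helpers below to avoid reallocating it for every lookup. */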
1514 static vec<vn_reference_op_s> shared_lookup_references;
1516 /* Create a vector of vn_reference_op_s structures from REF, a
1517 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1518 this function. *VALUEIZED_ANYTHING will specify whether any
1519 operands were valueized. */
1521 static vec<vn_reference_op_s>
1522 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1524 if (!ref)
1525 return vNULL;
1526 shared_lookup_references.truncate (0);
1527 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1528 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1529 valueized_anything);
1530 return shared_lookup_references;
1533 /* Create a vector of vn_reference_op_s structures from CALL, a
1534 call statement. The vector is shared among all callers of
1535 this function. */
1537 static vec<vn_reference_op_s>
1538 valueize_shared_reference_ops_from_call (gcall *call)
1540 if (!call)
1541 return vNULL;
1542 shared_lookup_references.truncate (0);
1543 copy_reference_ops_from_call (call, &shared_lookup_references);
1544 shared_lookup_references = valueize_refs (shared_lookup_references);
1545 return shared_lookup_references;
1548 /* Lookup a SCCVN reference operation VR in the current hash table.
1549 Returns the resulting value number if it exists in the hash table,
1550 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1551 vn_reference_t stored in the hashtable if something is found. */
1553 static tree
1554 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1556 vn_reference_s **slot;
1557 hashval_t hash;
1559 hash = vr->hashcode;
1560 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1561 if (!slot && current_info == optimistic_info)
1562 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1563 if (slot)
1565 if (vnresult)
1566 *vnresult = (vn_reference_t)*slot;
1567 return ((vn_reference_t)*slot)->result;
1570 return NULL_TREE;
1573 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1574 with the current VUSE and performs the expression lookup. */
1576 static void *
1577 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1578 unsigned int cnt, void *vr_)
1580 vn_reference_t vr = (vn_reference_t)vr_;
1581 vn_reference_s **slot;
1582 hashval_t hash;
1584 /* This bounds the stmt walks we perform on reference lookups
1585 to O(1) instead of O(N) where N is the number of dominating
1586 stores. */
1587 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1588 return (void *)-1;
1590 if (last_vuse_ptr)
1591 *last_vuse_ptr = vuse;
1593 /* Fixup vuse and hash. */
1594 if (vr->vuse)
1595 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1596 vr->vuse = vuse_ssa_val (vuse);
1597 if (vr->vuse)
1598 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1600 hash = vr->hashcode;
1601 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1602 if (!slot && current_info == optimistic_info)
1603 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1604 if (slot)
1605 return *slot;
1607 return NULL;
1610 /* Lookup an existing or insert a new vn_reference entry into the
1611 value table for the VUSE, SET, TYPE, OPERANDS reference which
1612 has the value VALUE which is either a constant or an SSA name. */
1614 static vn_reference_t
1615 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1616 alias_set_type set,
1617 tree type,
1618 vec<vn_reference_op_s,
1619 va_heap> operands,
1620 tree value)
1622 vn_reference_s vr1;
1623 vn_reference_t result;
1624 unsigned value_id;
1625 vr1.vuse = vuse;
1626 vr1.operands = operands;
1627 vr1.type = type;
1628 vr1.set = set;
1629 vr1.hashcode = vn_reference_compute_hash (&vr1);
1630 if (vn_reference_lookup_1 (&vr1, &result))
1631 return result;
1632 if (TREE_CODE (value) == SSA_NAME)
1633 value_id = VN_INFO (value)->value_id;
1634 else
1635 value_id = get_or_alloc_constant_value_id (value);
1636 return vn_reference_insert_pieces (vuse, set, type,
1637 operands.copy (), value, value_id);
1640 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1642 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1644 static tree
1645 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1647 if (!rcode.is_tree_code ())
1648 return NULL_TREE;
1649 vn_nary_op_t vnresult = NULL;
1650 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1651 (tree_code) rcode, type, ops, &vnresult);
1654 /* Return a value-number for RCODE OPS... either by looking up an existing
1655 value-number for the simplified result or by inserting the operation if
1656 INSERT is true. */
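/* A sketch of the intent (illustrative): asked for PLUS_EXPR <x_1, 0>,
   the match-and-simplify machinery first reduces it to x_1; if the
   simplified form is not a gimple value it is looked up in the nary
   table, and if INSERT is true and nothing is found a new SSA name is
   created, recorded in the valid table and returned. */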
1658 static tree
1659 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1660 bool insert)
1662 tree result = NULL_TREE;
1663 /* We will be creating a value number for
1664 RCODE (OPS...).
1665 So first simplify and lookup this expression to see if it
1666 is already available. */
1667 mprts_hook = vn_lookup_simplify_result;
1668 bool res = false;
1669 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1671 case 1:
1672 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1673 break;
1674 case 2:
1675 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1676 break;
1677 case 3:
1678 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1679 break;
1681 mprts_hook = NULL;
1682 gimple *new_stmt = NULL;
1683 if (res
1684 && gimple_simplified_result_is_gimple_val (rcode, ops))
1685 /* The expression is already available. */
1686 result = ops[0];
1687 else
1689 tree val = vn_lookup_simplify_result (rcode, type, ops);
1690 if (!val && insert)
1692 gimple_seq stmts = NULL;
1693 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1694 if (result)
1696 gcc_assert (gimple_seq_singleton_p (stmts));
1697 new_stmt = gimple_seq_first_stmt (stmts);
1700 else
1701 /* The expression is already available. */
1702 result = val;
1704 if (new_stmt)
1706 /* The expression is not yet available, value-number lhs to
1707 the new SSA_NAME we created. */
1708 /* Initialize value-number information properly. */
1709 VN_INFO_GET (result)->valnum = result;
1710 VN_INFO (result)->value_id = get_next_value_id ();
1711 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1712 new_stmt);
1713 VN_INFO (result)->needs_insertion = true;
1714 /* ??? PRE phi-translation inserts NARYs without corresponding
1715 SSA name result. Re-use those but set their result according
1716 to the stmt we just built. */
1717 vn_nary_op_t nary = NULL;
1718 vn_nary_op_lookup_stmt (new_stmt, &nary);
1719 if (nary)
1721 gcc_assert (nary->result == NULL_TREE);
1722 nary->result = gimple_assign_lhs (new_stmt);
1724 /* As all "inserted" statements are singleton SCCs, insert
1725 to the valid table. This is strictly needed to
1726 avoid re-generating new value SSA_NAMEs for the same
1727 expression during SCC iteration over and over (the
1728 optimistic table gets cleared after each iteration).
1729 We do not need to insert into the optimistic table, as
1730 lookups there will fall back to the valid table. */
1731 else if (current_info == optimistic_info)
1733 current_info = valid_info;
1734 vn_nary_op_insert_stmt (new_stmt, result);
1735 current_info = optimistic_info;
1737 else
1738 vn_nary_op_insert_stmt (new_stmt, result);
1739 if (dump_file && (dump_flags & TDF_DETAILS))
1741 fprintf (dump_file, "Inserting name ");
1742 print_generic_expr (dump_file, result, 0);
1743 fprintf (dump_file, " for expression ");
1744 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1745 fprintf (dump_file, "\n");
1748 return result;
1751 /* Return a value-number for RCODE OPS... either by looking up an existing
1752 value-number for the simplified result or by inserting the operation. */
1754 static tree
1755 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1757 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1760 /* Try to simplify the nary operation NARY (of the form RCODE OPS...)
1761 and return its value if present. */
1763 tree
1764 vn_nary_simplify (vn_nary_op_t nary)
1766 if (nary->length > 3)
1767 return NULL_TREE;
1768 tree ops[3];
1769 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1770 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1774 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1775 from the statement defining VUSE and if not successful tries to
1776 translate *REF and VR_ through an aggregate copy at the definition
1777 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1778 of *REF and *VR. If only disambiguation was performed then
1779 *DISAMBIGUATE_ONLY is set to true. */
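/* For instance (illustrative): after memset (&a, 0, sizeof (a)) a
   covered read from a.x can be answered with zero (case 1 below), and
   after an aggregate copy a = b a read from a.x can be translated into
   a read from b.x and looked up again (case 5 below). */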
1781 static void *
1782 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1783 bool *disambiguate_only)
1785 vn_reference_t vr = (vn_reference_t)vr_;
1786 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1787 tree base = ao_ref_base (ref);
1788 HOST_WIDE_INT offset, maxsize;
1789 static vec<vn_reference_op_s> lhs_ops;
1790 ao_ref lhs_ref;
1791 bool lhs_ref_ok = false;
1793 /* If the reference is based on a parameter that was determined as
1794 pointing to readonly memory it doesn't change. */
1795 if (TREE_CODE (base) == MEM_REF
1796 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1797 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1798 && bitmap_bit_p (const_parms,
1799 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1801 *disambiguate_only = true;
1802 return NULL;
1805 /* First try to disambiguate after value-replacing in the definition's LHS. */
1806 if (is_gimple_assign (def_stmt))
1808 tree lhs = gimple_assign_lhs (def_stmt);
1809 bool valueized_anything = false;
1810 /* Avoid re-allocation overhead. */
1811 lhs_ops.truncate (0);
1812 copy_reference_ops_from_ref (lhs, &lhs_ops);
1813 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1814 if (valueized_anything)
1816 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1817 get_alias_set (lhs),
1818 TREE_TYPE (lhs), lhs_ops);
1819 if (lhs_ref_ok
1820 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1822 *disambiguate_only = true;
1823 return NULL;
1826 else
1828 ao_ref_init (&lhs_ref, lhs);
1829 lhs_ref_ok = true;
1832 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1833 && gimple_call_num_args (def_stmt) <= 4)
1835 /* For builtin calls valueize its arguments and call the
1836 alias oracle again. Valueization may improve points-to
1837 info of pointers and constify size and position arguments.
1838 Originally this was motivated by PR61034 which has
1839 conditional calls to free falsely clobbering ref because
1840 of imprecise points-to info of the argument. */
1841 tree oldargs[4];
1842 bool valueized_anything = false;
1843 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1845 oldargs[i] = gimple_call_arg (def_stmt, i);
1846 if (TREE_CODE (oldargs[i]) == SSA_NAME
1847 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1849 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1850 valueized_anything = true;
1853 if (valueized_anything)
1855 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1856 ref);
1857 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1858 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1859 if (!res)
1861 *disambiguate_only = true;
1862 return NULL;
1867 if (*disambiguate_only)
1868 return (void *)-1;
1870 offset = ref->offset;
1871 maxsize = ref->max_size;
1873 /* If we cannot constrain the size of the reference we cannot
1874 test if anything kills it. */
1875 if (maxsize == -1)
1876 return (void *)-1;
1878 /* We can't deduce anything useful from clobbers. */
1879 if (gimple_clobber_p (def_stmt))
1880 return (void *)-1;
1882 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1883 from that definition.
1884 1) Memset. */
1885 if (is_gimple_reg_type (vr->type)
1886 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1887 && integer_zerop (gimple_call_arg (def_stmt, 1))
1888 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1889 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1891 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1892 tree base2;
1893 HOST_WIDE_INT offset2, size2, maxsize2;
1894 bool reverse;
1895 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1896 &reverse);
1897 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1898 if ((unsigned HOST_WIDE_INT)size2 / 8
1899 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1900 && maxsize2 != -1
1901 && operand_equal_p (base, base2, 0)
1902 && offset2 <= offset
1903 && offset2 + size2 >= offset + maxsize)
1905 tree val = build_zero_cst (vr->type);
1906 return vn_reference_lookup_or_insert_for_pieces
1907 (vuse, vr->set, vr->type, vr->operands, val);
1911 /* 2) Assignment from an empty CONSTRUCTOR. */
1912 else if (is_gimple_reg_type (vr->type)
1913 && gimple_assign_single_p (def_stmt)
1914 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1915 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1917 tree base2;
1918 HOST_WIDE_INT offset2, size2, maxsize2;
1919 bool reverse;
1920 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1921 &offset2, &size2, &maxsize2, &reverse);
1922 if (maxsize2 != -1
1923 && operand_equal_p (base, base2, 0)
1924 && offset2 <= offset
1925 && offset2 + size2 >= offset + maxsize)
1927 tree val = build_zero_cst (vr->type);
1928 return vn_reference_lookup_or_insert_for_pieces
1929 (vuse, vr->set, vr->type, vr->operands, val);
1933 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1934 routines to extract the assigned bits. */
1935 else if (ref->size == maxsize
1936 && is_gimple_reg_type (vr->type)
1937 && !contains_storage_order_barrier_p (vr->operands)
1938 && gimple_assign_single_p (def_stmt)
1939 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1940 && maxsize % BITS_PER_UNIT == 0
1941 && offset % BITS_PER_UNIT == 0
1942 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1943 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1944 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1946 tree base2;
1947 HOST_WIDE_INT offset2, size2, maxsize2;
1948 bool reverse;
1949 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1950 &offset2, &size2, &maxsize2, &reverse);
1951 if (!reverse
1952 && maxsize2 != -1
1953 && maxsize2 == size2
1954 && size2 % BITS_PER_UNIT == 0
1955 && offset2 % BITS_PER_UNIT == 0
1956 && operand_equal_p (base, base2, 0)
1957 && offset2 <= offset
1958 && offset2 + size2 >= offset + maxsize)
1960 /* We support up to 512-bit values (for V8DFmode). */
1961 unsigned char buffer[64];
1962 int len;
1964 tree rhs = gimple_assign_rhs1 (def_stmt);
1965 if (TREE_CODE (rhs) == SSA_NAME)
1966 rhs = SSA_VAL (rhs);
1967 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1968 buffer, sizeof (buffer));
1969 if (len > 0)
1971 tree type = vr->type;
1972 /* Make sure to interpret in a type that has a range
1973 covering the whole access size. */
1974 if (INTEGRAL_TYPE_P (vr->type)
1975 && ref->size != TYPE_PRECISION (vr->type))
1976 type = build_nonstandard_integer_type (ref->size,
1977 TYPE_UNSIGNED (type));
1978 tree val = native_interpret_expr (type,
1979 buffer
1980 + ((offset - offset2)
1981 / BITS_PER_UNIT),
1982 ref->size / BITS_PER_UNIT);
1983 /* If we chop off bits because the type's precision doesn't
1984 match the memory access size this is ok when optimizing
1985 reads but not when called from the DSE code during
1986 elimination. */
1987 if (val
1988 && type != vr->type)
1990 if (! int_fits_type_p (val, vr->type))
1991 val = NULL_TREE;
1992 else
1993 val = fold_convert (vr->type, val);
1996 if (val)
1997 return vn_reference_lookup_or_insert_for_pieces
1998 (vuse, vr->set, vr->type, vr->operands, val);
2003 /* 4) Assignment from an SSA name which definition we may be able
2004 to access pieces from. */
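/* For example (informal sketch):

     a = val_2;
     tem_1 = a.f;

   The read is re-expressed as BIT_FIELD_REF <val_2, size, offset>
   and handled as an nary operation.  */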
2005 else if (ref->size == maxsize
2006 && is_gimple_reg_type (vr->type)
2007 && !contains_storage_order_barrier_p (vr->operands)
2008 && gimple_assign_single_p (def_stmt)
2009 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2011 tree base2;
2012 HOST_WIDE_INT offset2, size2, maxsize2;
2013 bool reverse;
2014 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2015 &offset2, &size2, &maxsize2,
2016 &reverse);
2017 if (!reverse
2018 && maxsize2 != -1
2019 && maxsize2 == size2
2020 && operand_equal_p (base, base2, 0)
2021 && offset2 <= offset
2022 && offset2 + size2 >= offset + maxsize
2023 /* ??? We can't handle bitfield precision extracts without
2024 either using an alternate type for the BIT_FIELD_REF and
2025 then doing a conversion or possibly adjusting the offset
2026 according to endianness. */
2027 && (! INTEGRAL_TYPE_P (vr->type)
2028 || ref->size == TYPE_PRECISION (vr->type))
2029 && ref->size % BITS_PER_UNIT == 0)
2031 code_helper rcode = BIT_FIELD_REF;
2032 tree ops[3];
2033 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2034 ops[1] = bitsize_int (ref->size);
2035 ops[2] = bitsize_int (offset - offset2);
2036 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2037 if (val)
2039 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2040 (vuse, vr->set, vr->type, vr->operands, val);
2041 return res;
2046 /* 5) For aggregate copies translate the reference through them if
2047 the copy kills ref. */
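/* For example (informal sketch):

     *p_1 = *q_2;
     tem_3 = p_1->x;

   When the aggregate copy kills the read, the reference is rewritten
   in terms of q_2 and the walk continues with the new operands.  */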
2048 else if (vn_walk_kind == VN_WALKREWRITE
2049 && gimple_assign_single_p (def_stmt)
2050 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2051 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2052 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2054 tree base2;
2055 HOST_WIDE_INT maxsize2;
2056 int i, j, k;
2057 auto_vec<vn_reference_op_s> rhs;
2058 vn_reference_op_t vro;
2059 ao_ref r;
2061 if (!lhs_ref_ok)
2062 return (void *)-1;
2064 /* See if the assignment kills REF. */
2065 base2 = ao_ref_base (&lhs_ref);
2066 maxsize2 = lhs_ref.max_size;
2067 if (maxsize2 == -1
2068 || (base != base2
2069 && (TREE_CODE (base) != MEM_REF
2070 || TREE_CODE (base2) != MEM_REF
2071 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2072 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2073 TREE_OPERAND (base2, 1))))
2074 || !stmt_kills_ref_p (def_stmt, ref))
2075 return (void *)-1;
2077 /* Find the common base of ref and the lhs. lhs_ops already
2078 contains valueized operands for the lhs. */
2079 i = vr->operands.length () - 1;
2080 j = lhs_ops.length () - 1;
2081 while (j >= 0 && i >= 0
2082 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2084 i--;
2085 j--;
2088 /* ??? The innermost op should always be a MEM_REF and we already
2089 checked that the assignment to the lhs kills vr. Thus for
2090 aggregate copies using char[] types the vn_reference_op_eq
2091 may fail when comparing types for compatibility. But we really
2092 don't care here - further lookups with the rewritten operands
2093 will simply fail if we messed up types too badly. */
2094 HOST_WIDE_INT extra_off = 0;
2095 if (j == 0 && i >= 0
2096 && lhs_ops[0].opcode == MEM_REF
2097 && lhs_ops[0].off != -1)
2099 if (lhs_ops[0].off == vr->operands[i].off)
2100 i--, j--;
2101 else if (vr->operands[i].opcode == MEM_REF
2102 && vr->operands[i].off != -1)
2104 extra_off = vr->operands[i].off - lhs_ops[0].off;
2105 i--, j--;
2109 /* i now points to the first additional op.
2110 ??? LHS may not be completely contained in VR, one or more
2111 VIEW_CONVERT_EXPRs could be in its way. We could at least
2112 try handling outermost VIEW_CONVERT_EXPRs. */
2113 if (j != -1)
2114 return (void *)-1;
2116 /* Punt if the additional ops contain a storage order barrier. */
2117 for (k = i; k >= 0; k--)
2119 vro = &vr->operands[k];
2120 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2121 return (void *)-1;
2124 /* Now re-write REF to be based on the rhs of the assignment. */
2125 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2127 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2128 if (extra_off != 0)
2130 if (rhs.length () < 2
2131 || rhs[0].opcode != MEM_REF
2132 || rhs[0].off == -1)
2133 return (void *)-1;
2134 rhs[0].off += extra_off;
2135 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2136 build_int_cst (TREE_TYPE (rhs[0].op0),
2137 extra_off));
2140 /* We need to prepend vr->operands[0..i] to rhs. */
2141 vec<vn_reference_op_s> old = vr->operands;
2142 if (i + 1 + rhs.length () > vr->operands.length ())
2143 vr->operands.safe_grow (i + 1 + rhs.length ());
2144 else
2145 vr->operands.truncate (i + 1 + rhs.length ());
2146 FOR_EACH_VEC_ELT (rhs, j, vro)
2147 vr->operands[i + 1 + j] = *vro;
2148 vr->operands = valueize_refs (vr->operands);
2149 if (old == shared_lookup_references)
2150 shared_lookup_references = vr->operands;
2151 vr->hashcode = vn_reference_compute_hash (vr);
2153 /* Try folding the new reference to a constant. */
2154 tree val = fully_constant_vn_reference_p (vr);
2155 if (val)
2156 return vn_reference_lookup_or_insert_for_pieces
2157 (vuse, vr->set, vr->type, vr->operands, val);
2159 /* Adjust *ref from the new operands. */
2160 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2161 return (void *)-1;
2162 /* This can happen with bitfields. */
2163 if (ref->size != r.size)
2164 return (void *)-1;
2165 *ref = r;
2167 /* Do not update last seen VUSE after translating. */
2168 last_vuse_ptr = NULL;
2170 /* Keep looking for the adjusted *REF / VR pair. */
2171 return NULL;
2174 /* 6) For memcpy copies translate the reference through them if
2175 the copy kills ref. */
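/* For example (informal sketch):

     memcpy (&d, &s, sizeof (d));
     tem_1 = d.x;

   When the copy covers the read, the reference is rewritten as a
   MEM_REF based on &s at the corresponding offset.  */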
2176 else if (vn_walk_kind == VN_WALKREWRITE
2177 && is_gimple_reg_type (vr->type)
2178 /* ??? Handle BCOPY as well. */
2179 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2180 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2181 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2182 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2183 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2184 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2185 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2186 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2188 tree lhs, rhs;
2189 ao_ref r;
2190 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2191 vn_reference_op_s op;
2192 HOST_WIDE_INT at;
2194 /* Only handle non-variable, addressable refs. */
2195 if (ref->size != maxsize
2196 || offset % BITS_PER_UNIT != 0
2197 || ref->size % BITS_PER_UNIT != 0)
2198 return (void *)-1;
2200 /* Extract a pointer base and an offset for the destination. */
2201 lhs = gimple_call_arg (def_stmt, 0);
2202 lhs_offset = 0;
2203 if (TREE_CODE (lhs) == SSA_NAME)
2205 lhs = SSA_VAL (lhs);
2206 if (TREE_CODE (lhs) == SSA_NAME)
2208 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2209 if (gimple_assign_single_p (def_stmt)
2210 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2211 lhs = gimple_assign_rhs1 (def_stmt);
2214 if (TREE_CODE (lhs) == ADDR_EXPR)
2216 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2217 &lhs_offset);
2218 if (!tem)
2219 return (void *)-1;
2220 if (TREE_CODE (tem) == MEM_REF
2221 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2223 lhs = TREE_OPERAND (tem, 0);
2224 if (TREE_CODE (lhs) == SSA_NAME)
2225 lhs = SSA_VAL (lhs);
2226 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2228 else if (DECL_P (tem))
2229 lhs = build_fold_addr_expr (tem);
2230 else
2231 return (void *)-1;
2233 if (TREE_CODE (lhs) != SSA_NAME
2234 && TREE_CODE (lhs) != ADDR_EXPR)
2235 return (void *)-1;
2237 /* Extract a pointer base and an offset for the source. */
2238 rhs = gimple_call_arg (def_stmt, 1);
2239 rhs_offset = 0;
2240 if (TREE_CODE (rhs) == SSA_NAME)
2241 rhs = SSA_VAL (rhs);
2242 if (TREE_CODE (rhs) == ADDR_EXPR)
2244 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2245 &rhs_offset);
2246 if (!tem)
2247 return (void *)-1;
2248 if (TREE_CODE (tem) == MEM_REF
2249 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2251 rhs = TREE_OPERAND (tem, 0);
2252 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2254 else if (DECL_P (tem))
2255 rhs = build_fold_addr_expr (tem);
2256 else
2257 return (void *)-1;
2259 if (TREE_CODE (rhs) != SSA_NAME
2260 && TREE_CODE (rhs) != ADDR_EXPR)
2261 return (void *)-1;
2263 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2265 /* The bases of the destination and the references have to agree. */
2266 if ((TREE_CODE (base) != MEM_REF
2267 && !DECL_P (base))
2268 || (TREE_CODE (base) == MEM_REF
2269 && (TREE_OPERAND (base, 0) != lhs
2270 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2271 || (DECL_P (base)
2272 && (TREE_CODE (lhs) != ADDR_EXPR
2273 || TREE_OPERAND (lhs, 0) != base)))
2274 return (void *)-1;
2276 at = offset / BITS_PER_UNIT;
2277 if (TREE_CODE (base) == MEM_REF)
2278 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2279 /* If the access is completely outside of the memcpy destination
2280 area there is no aliasing. */
2281 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2282 || lhs_offset + copy_size <= at)
2283 return NULL;
2284 /* And the access has to be contained within the memcpy destination. */
2285 if (lhs_offset > at
2286 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2287 return (void *)-1;
2289 /* Make room for 2 operands in the new reference. */
2290 if (vr->operands.length () < 2)
2292 vec<vn_reference_op_s> old = vr->operands;
2293 vr->operands.safe_grow_cleared (2);
2294 if (old == shared_lookup_references)
2295 shared_lookup_references = vr->operands;
2297 else
2298 vr->operands.truncate (2);
2300 /* The looked-through reference is a simple MEM_REF. */
2301 memset (&op, 0, sizeof (op));
2302 op.type = vr->type;
2303 op.opcode = MEM_REF;
2304 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2305 op.off = at - lhs_offset + rhs_offset;
2306 vr->operands[0] = op;
2307 op.type = TREE_TYPE (rhs);
2308 op.opcode = TREE_CODE (rhs);
2309 op.op0 = rhs;
2310 op.off = -1;
2311 vr->operands[1] = op;
2312 vr->hashcode = vn_reference_compute_hash (vr);
2314 /* Try folding the new reference to a constant. */
2315 tree val = fully_constant_vn_reference_p (vr);
2316 if (val)
2317 return vn_reference_lookup_or_insert_for_pieces
2318 (vuse, vr->set, vr->type, vr->operands, val);
2320 /* Adjust *ref from the new operands. */
2321 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2322 return (void *)-1;
2323 /* This can happen with bitfields. */
2324 if (ref->size != r.size)
2325 return (void *)-1;
2326 *ref = r;
2328 /* Do not update last seen VUSE after translating. */
2329 last_vuse_ptr = NULL;
2331 /* Keep looking for the adjusted *REF / VR pair. */
2332 return NULL;
2335 /* Bail out and stop walking. */
2336 return (void *)-1;
2339 /* Return a reference op vector from OP that can be used for
2340 vn_reference_lookup_pieces. The caller is responsible for releasing
2341 the vector. */
2343 vec<vn_reference_op_s>
2344 vn_reference_operands_for_lookup (tree op)
2346 bool valueized;
2347 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2350 /* Lookup a reference operation by its parts, in the current hash table.
2351 Returns the resulting value number if it exists in the hash table,
2352 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2353 vn_reference_t stored in the hashtable if something is found. */
2355 tree
2356 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2357 vec<vn_reference_op_s> operands,
2358 vn_reference_t *vnresult, vn_lookup_kind kind)
2360 struct vn_reference_s vr1;
2361 vn_reference_t tmp;
2362 tree cst;
2364 if (!vnresult)
2365 vnresult = &tmp;
2366 *vnresult = NULL;
2368 vr1.vuse = vuse_ssa_val (vuse);
2369 shared_lookup_references.truncate (0);
2370 shared_lookup_references.safe_grow (operands.length ());
2371 memcpy (shared_lookup_references.address (),
2372 operands.address (),
2373 sizeof (vn_reference_op_s)
2374 * operands.length ());
2375 vr1.operands = operands = shared_lookup_references
2376 = valueize_refs (shared_lookup_references);
2377 vr1.type = type;
2378 vr1.set = set;
2379 vr1.hashcode = vn_reference_compute_hash (&vr1);
2380 if ((cst = fully_constant_vn_reference_p (&vr1)))
2381 return cst;
2383 vn_reference_lookup_1 (&vr1, vnresult);
2384 if (!*vnresult
2385 && kind != VN_NOWALK
2386 && vr1.vuse)
2388 ao_ref r;
2389 vn_walk_kind = kind;
2390 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2391 *vnresult =
2392 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2393 vn_reference_lookup_2,
2394 vn_reference_lookup_3,
2395 vuse_ssa_val, &vr1);
2396 gcc_checking_assert (vr1.operands == shared_lookup_references);
2399 if (*vnresult)
2400 return (*vnresult)->result;
2402 return NULL_TREE;
2405 /* Lookup OP in the current hash table, and return the resulting value
2406 number if it exists in the hash table. Return NULL_TREE if it does
2407 not exist in the hash table or if the result field of the structure
2408 was NULL. VNRESULT will be filled in with the vn_reference_t
2409 stored in the hashtable if one exists. When TBAA_P is false assume
2410 we are looking up a store and treat it as having alias-set zero. */
2412 tree
2413 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2414 vn_reference_t *vnresult, bool tbaa_p)
2416 vec<vn_reference_op_s> operands;
2417 struct vn_reference_s vr1;
2418 tree cst;
2419 bool valuezied_anything;
2421 if (vnresult)
2422 *vnresult = NULL;
2424 vr1.vuse = vuse_ssa_val (vuse);
2425 vr1.operands = operands
2426 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2427 vr1.type = TREE_TYPE (op);
2428 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2429 vr1.hashcode = vn_reference_compute_hash (&vr1);
2430 if ((cst = fully_constant_vn_reference_p (&vr1)))
2431 return cst;
2433 if (kind != VN_NOWALK
2434 && vr1.vuse)
2436 vn_reference_t wvnresult;
2437 ao_ref r;
2438 /* Make sure to use a valueized reference if we valueized anything.
2439 Otherwise preserve the full reference for advanced TBAA. */
2440 if (!valuezied_anything
2441 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2442 vr1.operands))
2443 ao_ref_init (&r, op);
2444 if (! tbaa_p)
2445 r.ref_alias_set = r.base_alias_set = 0;
2446 vn_walk_kind = kind;
2447 wvnresult =
2448 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2449 vn_reference_lookup_2,
2450 vn_reference_lookup_3,
2451 vuse_ssa_val, &vr1);
2452 gcc_checking_assert (vr1.operands == shared_lookup_references);
2453 if (wvnresult)
2455 if (vnresult)
2456 *vnresult = wvnresult;
2457 return wvnresult->result;
2460 return NULL_TREE;
2463 return vn_reference_lookup_1 (&vr1, vnresult);
2466 /* Lookup CALL in the current hash table and return the entry in
2467 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2469 void
2470 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2471 vn_reference_t vr)
2473 if (vnresult)
2474 *vnresult = NULL;
2476 tree vuse = gimple_vuse (call);
2478 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2479 vr->operands = valueize_shared_reference_ops_from_call (call);
2480 vr->type = gimple_expr_type (call);
2481 vr->set = 0;
2482 vr->hashcode = vn_reference_compute_hash (vr);
2483 vn_reference_lookup_1 (vr, vnresult);
2486 /* Insert OP into the current hash table with a value number of
2487 RESULT, and return the resulting reference structure we created. */
2489 static vn_reference_t
2490 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2492 vn_reference_s **slot;
2493 vn_reference_t vr1;
2494 bool tem;
2496 vr1 = current_info->references_pool->allocate ();
2497 if (TREE_CODE (result) == SSA_NAME)
2498 vr1->value_id = VN_INFO (result)->value_id;
2499 else
2500 vr1->value_id = get_or_alloc_constant_value_id (result);
2501 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2502 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2503 vr1->type = TREE_TYPE (op);
2504 vr1->set = get_alias_set (op);
2505 vr1->hashcode = vn_reference_compute_hash (vr1);
2506 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2507 vr1->result_vdef = vdef;
2509 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2510 INSERT);
2512 /* Because we lookup stores using vuses, and value number failures
2513 using the vdefs (see visit_reference_op_store for how and why),
2514 it's possible that on failure we may try to insert an already
2515 inserted store. This is not wrong; there is no ssa name for a
2516 store that we could use as a differentiator anyway. Thus, unlike
2517 the other lookup functions, you cannot gcc_assert (!*slot)
2518 here. */
2520 /* But free the old slot in case of a collision. */
2521 if (*slot)
2522 free_reference (*slot);
2524 *slot = vr1;
2525 return vr1;
2528 /* Insert a reference by its pieces into the current hash table with
2529 a value number of RESULT. Return the resulting reference
2530 structure we created. */
2532 vn_reference_t
2533 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2534 vec<vn_reference_op_s> operands,
2535 tree result, unsigned int value_id)
2538 vn_reference_s **slot;
2539 vn_reference_t vr1;
2541 vr1 = current_info->references_pool->allocate ();
2542 vr1->value_id = value_id;
2543 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2544 vr1->operands = valueize_refs (operands);
2545 vr1->type = type;
2546 vr1->set = set;
2547 vr1->hashcode = vn_reference_compute_hash (vr1);
2548 if (result && TREE_CODE (result) == SSA_NAME)
2549 result = SSA_VAL (result);
2550 vr1->result = result;
2552 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2553 INSERT);
2555 /* At this point we should have all the things inserted that we have
2556 seen before, and we should never try inserting something that
2557 already exists. */
2558 gcc_assert (!*slot);
2559 if (*slot)
2560 free_reference (*slot);
2562 *slot = vr1;
2563 return vr1;
2566 /* Compute and return the hash value for nary operation VNO1. */
2568 static hashval_t
2569 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2571 inchash::hash hstate;
2572 unsigned i;
2574 for (i = 0; i < vno1->length; ++i)
2575 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2576 vno1->op[i] = SSA_VAL (vno1->op[i]);
2578 if (((vno1->length == 2
2579 && commutative_tree_code (vno1->opcode))
2580 || (vno1->length == 3
2581 && commutative_ternary_tree_code (vno1->opcode)))
2582 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2583 std::swap (vno1->op[0], vno1->op[1]);
2584 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2585 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2587 std::swap (vno1->op[0], vno1->op[1]);
2588 vno1->opcode = swap_tree_comparison (vno1->opcode);
2591 hstate.add_int (vno1->opcode);
2592 for (i = 0; i < vno1->length; ++i)
2593 inchash::add_expr (vno1->op[i], hstate);
2595 return hstate.end ();
2598 /* Compare nary operations VNO1 and VNO2 and return true if they are
2599 equivalent. */
2601 bool
2602 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2604 unsigned i;
2606 if (vno1->hashcode != vno2->hashcode)
2607 return false;
2609 if (vno1->length != vno2->length)
2610 return false;
2612 if (vno1->opcode != vno2->opcode
2613 || !types_compatible_p (vno1->type, vno2->type))
2614 return false;
2616 for (i = 0; i < vno1->length; ++i)
2617 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2618 return false;
2620 return true;
2623 /* Initialize VNO from the pieces provided. */
2625 static void
2626 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2627 enum tree_code code, tree type, tree *ops)
2629 vno->opcode = code;
2630 vno->length = length;
2631 vno->type = type;
2632 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2635 /* Initialize VNO from OP. */
2637 static void
2638 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2640 unsigned i;
2642 vno->opcode = TREE_CODE (op);
2643 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2644 vno->type = TREE_TYPE (op);
2645 for (i = 0; i < vno->length; ++i)
2646 vno->op[i] = TREE_OPERAND (op, i);
2649 /* Return the number of operands for a vn_nary ops structure from STMT. */
2651 static unsigned int
2652 vn_nary_length_from_stmt (gimple *stmt)
2654 switch (gimple_assign_rhs_code (stmt))
2656 case REALPART_EXPR:
2657 case IMAGPART_EXPR:
2658 case VIEW_CONVERT_EXPR:
2659 return 1;
2661 case BIT_FIELD_REF:
2662 return 3;
2664 case CONSTRUCTOR:
2665 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2667 default:
2668 return gimple_num_ops (stmt) - 1;
2672 /* Initialize VNO from STMT. */
2674 static void
2675 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2677 unsigned i;
2679 vno->opcode = gimple_assign_rhs_code (stmt);
2680 vno->type = gimple_expr_type (stmt);
2681 switch (vno->opcode)
2683 case REALPART_EXPR:
2684 case IMAGPART_EXPR:
2685 case VIEW_CONVERT_EXPR:
2686 vno->length = 1;
2687 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2688 break;
2690 case BIT_FIELD_REF:
2691 vno->length = 3;
2692 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2693 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2694 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2695 break;
2697 case CONSTRUCTOR:
2698 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2699 for (i = 0; i < vno->length; ++i)
2700 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2701 break;
2703 default:
2704 gcc_checking_assert (!gimple_assign_single_p (stmt));
2705 vno->length = gimple_num_ops (stmt) - 1;
2706 for (i = 0; i < vno->length; ++i)
2707 vno->op[i] = gimple_op (stmt, i + 1);
2711 /* Compute the hashcode for VNO and look for it in the hash table;
2712 return the resulting value number if it exists in the hash table.
2713 Return NULL_TREE if it does not exist in the hash table or if the
2714 result field of the operation is NULL. VNRESULT will contain the
2715 vn_nary_op_t from the hashtable if it exists. */
2717 static tree
2718 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2720 vn_nary_op_s **slot;
2722 if (vnresult)
2723 *vnresult = NULL;
2725 vno->hashcode = vn_nary_op_compute_hash (vno);
2726 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2727 NO_INSERT);
2728 if (!slot && current_info == optimistic_info)
2729 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2730 NO_INSERT);
2731 if (!slot)
2732 return NULL_TREE;
2733 if (vnresult)
2734 *vnresult = *slot;
2735 return (*slot)->result;
2738 /* Lookup an n-ary operation by its pieces and return the resulting value
2739 number if it exists in the hash table. Return NULL_TREE if it does
2740 not exist in the hash table or if the result field of the operation
2741 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2742 if it exists. */
2744 tree
2745 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2746 tree type, tree *ops, vn_nary_op_t *vnresult)
2748 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2749 sizeof_vn_nary_op (length));
2750 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2751 return vn_nary_op_lookup_1 (vno1, vnresult);
2754 /* Lookup OP in the current hash table, and return the resulting value
2755 number if it exists in the hash table. Return NULL_TREE if it does
2756 not exist in the hash table or if the result field of the operation
2757 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2758 if it exists. */
2760 tree
2761 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2763 vn_nary_op_t vno1
2764 = XALLOCAVAR (struct vn_nary_op_s,
2765 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2766 init_vn_nary_op_from_op (vno1, op);
2767 return vn_nary_op_lookup_1 (vno1, vnresult);
2770 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2771 value number if it exists in the hash table. Return NULL_TREE if
2772 it does not exist in the hash table. VNRESULT will contain the
2773 vn_nary_op_t from the hashtable if it exists. */
2775 tree
2776 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2778 vn_nary_op_t vno1
2779 = XALLOCAVAR (struct vn_nary_op_s,
2780 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2781 init_vn_nary_op_from_stmt (vno1, stmt);
2782 return vn_nary_op_lookup_1 (vno1, vnresult);
2785 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2787 static vn_nary_op_t
2788 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2790 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2793 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2794 obstack. */
2796 static vn_nary_op_t
2797 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2799 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2800 &current_info->nary_obstack);
2802 vno1->value_id = value_id;
2803 vno1->length = length;
2804 vno1->result = result;
2806 return vno1;
2809 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2810 VNO->HASHCODE first. */
2812 static vn_nary_op_t
2813 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2814 bool compute_hash)
2816 vn_nary_op_s **slot;
2818 if (compute_hash)
2819 vno->hashcode = vn_nary_op_compute_hash (vno);
2821 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2822 gcc_assert (!*slot);
2824 *slot = vno;
2825 return vno;
2828 /* Insert an n-ary operation into the current hash table using its
2829 pieces. Return the vn_nary_op_t structure we created and put in
2830 the hashtable. */
2832 vn_nary_op_t
2833 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2834 tree type, tree *ops,
2835 tree result, unsigned int value_id)
2837 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2838 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2839 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2842 /* Insert OP into the current hash table with a value number of
2843 RESULT. Return the vn_nary_op_t structure we created and put in
2844 the hashtable. */
2846 vn_nary_op_t
2847 vn_nary_op_insert (tree op, tree result)
2849 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2850 vn_nary_op_t vno1;
2852 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2853 init_vn_nary_op_from_op (vno1, op);
2854 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2857 /* Insert the rhs of STMT into the current hash table with a value number of
2858 RESULT. */
2860 static vn_nary_op_t
2861 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2863 vn_nary_op_t vno1
2864 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2865 result, VN_INFO (result)->value_id);
2866 init_vn_nary_op_from_stmt (vno1, stmt);
2867 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2870 /* Compute a hashcode for PHI operation VP1 and return it. */
2872 static inline hashval_t
2873 vn_phi_compute_hash (vn_phi_t vp1)
2875 inchash::hash hstate (vp1->phiargs.length () > 2
2876 ? vp1->block->index : vp1->phiargs.length ());
2877 tree phi1op;
2878 tree type;
2879 edge e;
2880 edge_iterator ei;
2882 /* If all PHI arguments are constants we need to distinguish
2883 the PHI node via its type. */
2884 type = vp1->type;
2885 hstate.merge_hash (vn_hash_type (type));
2887 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2889 /* Don't hash backedge values; they need to be handled as VN_TOP
2890 for optimistic value-numbering. */
2891 if (e->flags & EDGE_DFS_BACK)
2892 continue;
2894 phi1op = vp1->phiargs[e->dest_idx];
2895 if (phi1op == VN_TOP)
2896 continue;
2897 inchash::add_expr (phi1op, hstate);
2900 return hstate.end ();
2904 /* Return true if COND1 and COND2 represent the same condition, set
2905 *INVERTED_P if one needs to be inverted to make it the same as
2906 the other. */
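/* For example (informal sketch): a_1 < b_2 and b_2 > a_1 are the same
   condition with *INVERTED_P false, while a_1 < b_2 and a_1 >= b_2
   match with *INVERTED_P set (modulo NaN handling).  */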
2908 static bool
2909 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2911 enum tree_code code1 = gimple_cond_code (cond1);
2912 enum tree_code code2 = gimple_cond_code (cond2);
2913 tree lhs1 = gimple_cond_lhs (cond1);
2914 tree lhs2 = gimple_cond_lhs (cond2);
2915 tree rhs1 = gimple_cond_rhs (cond1);
2916 tree rhs2 = gimple_cond_rhs (cond2);
2918 *inverted_p = false;
2919 if (code1 == code2)
2921 else if (code1 == swap_tree_comparison (code2))
2922 std::swap (lhs2, rhs2);
2923 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2924 *inverted_p = true;
2925 else if (code1 == invert_tree_comparison
2926 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2928 std::swap (lhs2, rhs2);
2929 *inverted_p = true;
2931 else
2932 return false;
2934 lhs1 = vn_valueize (lhs1);
2935 rhs1 = vn_valueize (rhs1);
2936 lhs2 = vn_valueize (lhs2);
2937 rhs2 = vn_valueize (rhs2);
2938 return ((expressions_equal_p (lhs1, lhs2)
2939 && expressions_equal_p (rhs1, rhs2))
2940 || (commutative_tree_code (code1)
2941 && expressions_equal_p (lhs1, rhs2)
2942 && expressions_equal_p (rhs1, lhs2)));
2945 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2947 static int
2948 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2950 if (vp1->hashcode != vp2->hashcode)
2951 return false;
2953 if (vp1->block != vp2->block)
2955 if (vp1->phiargs.length () != vp2->phiargs.length ())
2956 return false;
2958 switch (vp1->phiargs.length ())
2960 case 1:
2961 /* Single-arg PHIs are just copies. */
2962 break;
2964 case 2:
2966 /* Rule out backedges into the PHI. */
2967 if (vp1->block->loop_father->header == vp1->block
2968 || vp2->block->loop_father->header == vp2->block)
2969 return false;
2971 /* If the PHI nodes do not have compatible types
2972 they are not the same. */
2973 if (!types_compatible_p (vp1->type, vp2->type))
2974 return false;
2976 basic_block idom1
2977 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2978 basic_block idom2
2979 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2980 /* If the immediate dominators end in switch stmts, multiple
2981 values may end up in the same PHI arg via intermediate
2982 CFG merges. */
2983 if (EDGE_COUNT (idom1->succs) != 2
2984 || EDGE_COUNT (idom2->succs) != 2)
2985 return false;
2987 /* Verify the controlling stmt is the same. */
2988 gimple *last1 = last_stmt (idom1);
2989 gimple *last2 = last_stmt (idom2);
2990 if (gimple_code (last1) != GIMPLE_COND
2991 || gimple_code (last2) != GIMPLE_COND)
2992 return false;
2993 bool inverted_p;
2994 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2995 as_a <gcond *> (last2), &inverted_p))
2996 return false;
2998 /* Get at true/false controlled edges into the PHI. */
2999 edge te1, te2, fe1, fe2;
3000 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3001 &te1, &fe1)
3002 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3003 &te2, &fe2))
3004 return false;
3006 /* Swap edges if the second condition is the inverse of the
3007 first. */
3008 if (inverted_p)
3009 std::swap (te2, fe2);
3011 /* ??? Handle VN_TOP specially. */
3012 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3013 vp2->phiargs[te2->dest_idx])
3014 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3015 vp2->phiargs[fe2->dest_idx]))
3016 return false;
3018 return true;
3021 default:
3022 return false;
3026 /* If the PHI nodes do not have compatible types
3027 they are not the same. */
3028 if (!types_compatible_p (vp1->type, vp2->type))
3029 return false;
3031 /* Any phi in the same block will have its arguments in the
3032 same edge order, because of how we store phi nodes. */
3033 int i;
3034 tree phi1op;
3035 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3037 tree phi2op = vp2->phiargs[i];
3038 if (phi1op == VN_TOP || phi2op == VN_TOP)
3039 continue;
3040 if (!expressions_equal_p (phi1op, phi2op))
3041 return false;
3044 return true;
3047 static vec<tree> shared_lookup_phiargs;
3049 /* Lookup PHI in the current hash table, and return the resulting
3050 value number if it exists in the hash table. Return NULL_TREE if
3051 it does not exist in the hash table. */
3053 static tree
3054 vn_phi_lookup (gimple *phi)
3056 vn_phi_s **slot;
3057 struct vn_phi_s vp1;
3058 edge e;
3059 edge_iterator ei;
3061 shared_lookup_phiargs.truncate (0);
3062 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3064 /* Canonicalize the SSA_NAME's to their value number. */
3065 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3067 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3068 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3069 shared_lookup_phiargs[e->dest_idx] = def;
3071 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3072 vp1.phiargs = shared_lookup_phiargs;
3073 vp1.block = gimple_bb (phi);
3074 vp1.hashcode = vn_phi_compute_hash (&vp1);
3075 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3076 NO_INSERT);
3077 if (!slot && current_info == optimistic_info)
3078 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3079 NO_INSERT);
3080 if (!slot)
3081 return NULL_TREE;
3082 return (*slot)->result;
3085 /* Insert PHI into the current hash table with a value number of
3086 RESULT. */
3088 static vn_phi_t
3089 vn_phi_insert (gimple *phi, tree result)
3091 vn_phi_s **slot;
3092 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3093 vec<tree> args = vNULL;
3094 edge e;
3095 edge_iterator ei;
3097 args.safe_grow (gimple_phi_num_args (phi));
3099 /* Canonicalize the SSA_NAME's to their value number. */
3100 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3102 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3103 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3104 args[e->dest_idx] = def;
3106 vp1->value_id = VN_INFO (result)->value_id;
3107 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3108 vp1->phiargs = args;
3109 vp1->block = gimple_bb (phi);
3110 vp1->result = result;
3111 vp1->hashcode = vn_phi_compute_hash (vp1);
3113 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3115 /* Because we iterate over phi operations more than once, it's
3116 possible the slot might already exist here, hence no assert. */
3117 *slot = vp1;
3118 return vp1;
3122 /* Print set of components in strongly connected component SCC to OUT. */
3124 static void
3125 print_scc (FILE *out, vec<tree> scc)
3127 tree var;
3128 unsigned int i;
3130 fprintf (out, "SCC consists of:");
3131 FOR_EACH_VEC_ELT (scc, i, var)
3133 fprintf (out, " ");
3134 print_generic_expr (out, var, 0);
3136 fprintf (out, "\n");
3139 /* Return true if BB1 is dominated by BB2 taking into account edges
3140 that are not executable. */
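/* For example (informal sketch): if BB1 has two predecessors but only
   the edge from BB3 is marked EDGE_EXECUTABLE, BB1 is considered
   dominated by BB2 whenever BB3 is.  */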
3142 static bool
3143 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3145 edge_iterator ei;
3146 edge e;
3148 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3149 return true;
3151 /* Before iterating we'd like to know if there exists an
3152 (executable) path from bb2 to bb1 at all; if not we can
3153 directly return false. For now simply iterate once. */
3155 /* Iterate to the single executable bb1 predecessor. */
3156 if (EDGE_COUNT (bb1->preds) > 1)
3158 edge prede = NULL;
3159 FOR_EACH_EDGE (e, ei, bb1->preds)
3160 if (e->flags & EDGE_EXECUTABLE)
3162 if (prede)
3164 prede = NULL;
3165 break;
3167 prede = e;
3169 if (prede)
3171 bb1 = prede->src;
3173 /* Re-do the dominance check with changed bb1. */
3174 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3175 return true;
3179 /* Iterate to the single executable bb2 successor. */
3180 edge succe = NULL;
3181 FOR_EACH_EDGE (e, ei, bb2->succs)
3182 if (e->flags & EDGE_EXECUTABLE)
3184 if (succe)
3186 succe = NULL;
3187 break;
3189 succe = e;
3191 if (succe)
3193 /* Verify the reached block is only reached through succe.
3194 If there is only one edge we can spare us the dominator
3195 check and iterate directly. */
3196 if (EDGE_COUNT (succe->dest->preds) > 1)
3198 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3199 if (e != succe
3200 && (e->flags & EDGE_EXECUTABLE))
3202 succe = NULL;
3203 break;
3206 if (succe)
3208 bb2 = succe->dest;
3210 /* Re-do the dominance check with changed bb2. */
3211 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3212 return true;
3216 /* We could now iterate updating bb1 / bb2. */
3217 return false;
3220 /* Set the value number of FROM to TO, return true if it has changed
3221 as a result. */
3223 static inline bool
3224 set_ssa_val_to (tree from, tree to)
3226 tree currval = SSA_VAL (from);
3227 HOST_WIDE_INT toff, coff;
3229 /* The only things we allow as value numbers are ssa_names
3230 and invariants. So assert that here. We don't allow VN_TOP
3231 as visiting a stmt should produce a value-number other than
3232 that.
3233 ??? Still VN_TOP can happen for unreachable code, so force
3234 it to varying in that case. Not all code is prepared to
3235 get VN_TOP on valueization. */
3236 if (to == VN_TOP)
3238 if (dump_file && (dump_flags & TDF_DETAILS))
3239 fprintf (dump_file, "Forcing value number to varying on "
3240 "receiving VN_TOP\n");
3241 to = from;
3244 gcc_assert (to != NULL_TREE
3245 && ((TREE_CODE (to) == SSA_NAME
3246 && (to == from || SSA_VAL (to) == to))
3247 || is_gimple_min_invariant (to)));
3249 if (from != to)
3251 if (currval == from)
3253 if (dump_file && (dump_flags & TDF_DETAILS))
3255 fprintf (dump_file, "Not changing value number of ");
3256 print_generic_expr (dump_file, from, 0);
3257 fprintf (dump_file, " from VARYING to ");
3258 print_generic_expr (dump_file, to, 0);
3259 fprintf (dump_file, "\n");
3261 return false;
3263 else if (TREE_CODE (to) == SSA_NAME
3264 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3265 to = from;
3268 if (dump_file && (dump_flags & TDF_DETAILS))
3270 fprintf (dump_file, "Setting value number of ");
3271 print_generic_expr (dump_file, from, 0);
3272 fprintf (dump_file, " to ");
3273 print_generic_expr (dump_file, to, 0);
3276 if (currval != to
3277 && !operand_equal_p (currval, to, 0)
3278 /* ??? For addresses involving volatile objects or types operand_equal_p
3279 does not reliably detect ADDR_EXPRs as equal. We know we are only
3280 getting invariant gimple addresses here, so can use
3281 get_addr_base_and_unit_offset to do this comparison. */
3282 && !(TREE_CODE (currval) == ADDR_EXPR
3283 && TREE_CODE (to) == ADDR_EXPR
3284 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3285 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3286 && coff == toff))
3288 /* If we equate two SSA names we have to make the side-band info
3289 of the leader conservative (and remember whatever original value
3290 was present). */
3291 if (TREE_CODE (to) == SSA_NAME)
3293 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3294 && SSA_NAME_RANGE_INFO (to))
3296 if (SSA_NAME_IS_DEFAULT_DEF (to)
3297 || dominated_by_p_w_unex
3298 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3299 gimple_bb (SSA_NAME_DEF_STMT (to))))
3300 /* Keep the info from the dominator. */
3302 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3303 || dominated_by_p_w_unex
3304 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3305 gimple_bb (SSA_NAME_DEF_STMT (from))))
3307 /* Save old info. */
3308 if (! VN_INFO (to)->info.range_info)
3310 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3311 VN_INFO (to)->range_info_anti_range_p
3312 = SSA_NAME_ANTI_RANGE_P (to);
3314 /* Use that from the dominator. */
3315 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3316 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3318 else
3320 /* Save old info. */
3321 if (! VN_INFO (to)->info.range_info)
3323 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3324 VN_INFO (to)->range_info_anti_range_p
3325 = SSA_NAME_ANTI_RANGE_P (to);
3327 /* Rather than allocating memory and unioning the info
3328 just clear it. */
3329 SSA_NAME_RANGE_INFO (to) = NULL;
3332 else if (POINTER_TYPE_P (TREE_TYPE (to))
3333 && SSA_NAME_PTR_INFO (to))
3335 if (SSA_NAME_IS_DEFAULT_DEF (to)
3336 || dominated_by_p_w_unex
3337 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3338 gimple_bb (SSA_NAME_DEF_STMT (to))))
3339 /* Keep the info from the dominator. */
3341 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3342 || dominated_by_p_w_unex
3343 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3344 gimple_bb (SSA_NAME_DEF_STMT (from))))
3346 /* Save old info. */
3347 if (! VN_INFO (to)->info.ptr_info)
3348 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3349 /* Use that from the dominator. */
3350 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3352 else if (! SSA_NAME_PTR_INFO (from)
3353 /* Handle the case of trivially equivalent info. */
3354 || memcmp (SSA_NAME_PTR_INFO (to),
3355 SSA_NAME_PTR_INFO (from),
3356 sizeof (ptr_info_def)) != 0)
3358 /* Save old info. */
3359 if (! VN_INFO (to)->info.ptr_info)
3360 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3361 /* Rather than allocating memory and unioning the info
3362 just clear it. */
3363 SSA_NAME_PTR_INFO (to) = NULL;
3368 VN_INFO (from)->valnum = to;
3369 if (dump_file && (dump_flags & TDF_DETAILS))
3370 fprintf (dump_file, " (changed)\n");
3371 return true;
3373 if (dump_file && (dump_flags & TDF_DETAILS))
3374 fprintf (dump_file, "\n");
3375 return false;
3378 /* Mark as processed all the definitions in the defining stmt of USE, or
3379 the USE itself. */
3381 static void
3382 mark_use_processed (tree use)
3384 ssa_op_iter iter;
3385 def_operand_p defp;
3386 gimple *stmt = SSA_NAME_DEF_STMT (use);
3388 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3390 VN_INFO (use)->use_processed = true;
3391 return;
3394 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3396 tree def = DEF_FROM_PTR (defp);
3398 VN_INFO (def)->use_processed = true;
3402 /* Value number all definitions in STMT to themselves.
3403 Return true if a value number changed. */
3405 static bool
3406 defs_to_varying (gimple *stmt)
3408 bool changed = false;
3409 ssa_op_iter iter;
3410 def_operand_p defp;
3412 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3414 tree def = DEF_FROM_PTR (defp);
3415 changed |= set_ssa_val_to (def, def);
3417 return changed;
3420 /* Visit a copy between LHS and RHS, return true if the value number
3421 changed. */
3423 static bool
3424 visit_copy (tree lhs, tree rhs)
3426 /* Valueize. */
3427 rhs = SSA_VAL (rhs);
3429 return set_ssa_val_to (lhs, rhs);
3432 /* Visit a nary operator RHS, value number it, and return true if the
3433 value number of LHS has changed as a result. */
3435 static bool
3436 visit_nary_op (tree lhs, gimple *stmt)
3438 bool changed = false;
3439 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3441 if (result)
3442 changed = set_ssa_val_to (lhs, result);
3443 else
3445 changed = set_ssa_val_to (lhs, lhs);
3446 vn_nary_op_insert_stmt (stmt, lhs);
3449 return changed;
3452 /* Visit a call STMT storing into LHS. Return true if the value number
3453 of the LHS has changed as a result. */
3455 static bool
3456 visit_reference_op_call (tree lhs, gcall *stmt)
3458 bool changed = false;
3459 struct vn_reference_s vr1;
3460 vn_reference_t vnresult = NULL;
3461 tree vdef = gimple_vdef (stmt);
3463 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3464 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3465 lhs = NULL_TREE;
3467 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3468 if (vnresult)
3470 if (vnresult->result_vdef && vdef)
3471 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3472 else if (vdef)
3473 /* If the call was discovered to be pure or const reflect
3474 that as far as possible. */
3475 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3477 if (!vnresult->result && lhs)
3478 vnresult->result = lhs;
3480 if (vnresult->result && lhs)
3481 changed |= set_ssa_val_to (lhs, vnresult->result);
3483 else
3485 vn_reference_t vr2;
3486 vn_reference_s **slot;
3487 tree vdef_val = vdef;
3488 if (vdef)
3490 /* If we value numbered an indirect call's callee to one
3491 not clobbering memory, value number its VDEF to its
3492 VUSE. */
3493 tree fn = gimple_call_fn (stmt);
3494 if (fn && TREE_CODE (fn) == SSA_NAME)
3496 fn = SSA_VAL (fn);
3497 if (TREE_CODE (fn) == ADDR_EXPR
3498 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3499 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3500 & (ECF_CONST | ECF_PURE)))
3501 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3503 changed |= set_ssa_val_to (vdef, vdef_val);
3505 if (lhs)
3506 changed |= set_ssa_val_to (lhs, lhs);
3507 vr2 = current_info->references_pool->allocate ();
3508 vr2->vuse = vr1.vuse;
3509 /* As we are not walking the virtual operand chain we know the
3510 shared_lookup_references are still original so we can re-use
3511 them here. */
3512 vr2->operands = vr1.operands.copy ();
3513 vr2->type = vr1.type;
3514 vr2->set = vr1.set;
3515 vr2->hashcode = vr1.hashcode;
3516 vr2->result = lhs;
3517 vr2->result_vdef = vdef_val;
3518 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3519 INSERT);
3520 gcc_assert (!*slot);
3521 *slot = vr2;
3524 return changed;
3527 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3528 and return true if the value number of the LHS has changed as a result. */
3530 static bool
3531 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3533 bool changed = false;
3534 tree last_vuse;
3535 tree result;
3537 last_vuse = gimple_vuse (stmt);
3538 last_vuse_ptr = &last_vuse;
3539 result = vn_reference_lookup (op, gimple_vuse (stmt),
3540 default_vn_walk_kind, NULL, true);
3541 last_vuse_ptr = NULL;
3543 /* We handle type-punning through unions by value-numbering based
3544 on offset and size of the access. Be prepared to handle a
3545 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
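/* For example (informal sketch): a read of u.f following a store to
   u.i of the same union yields a result of integer type; it is
   wrapped as VIEW_CONVERT_EXPR <float> (result) below.  */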
3546 if (result
3547 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3549 /* We will be setting the value number of lhs to the value number
3550 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3551 So first simplify and lookup this expression to see if it
3552 is already available. */
3553 code_helper rcode = VIEW_CONVERT_EXPR;
3554 tree ops[3] = { result };
3555 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3558 if (result)
3559 changed = set_ssa_val_to (lhs, result);
3560 else
3562 changed = set_ssa_val_to (lhs, lhs);
3563 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3566 return changed;
3570 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3571 and return true if the value number of the LHS has changed as a result. */
3573 static bool
3574 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3576 bool changed = false;
3577 vn_reference_t vnresult = NULL;
3578 tree result, assign;
3579 bool resultsame = false;
3580 tree vuse = gimple_vuse (stmt);
3581 tree vdef = gimple_vdef (stmt);
3583 if (TREE_CODE (op) == SSA_NAME)
3584 op = SSA_VAL (op);
3586 /* First we want to lookup using the *vuses* from the store and see
3587 if the last store to this location with the same address
3588 had the same value.
3590 The vuses represent the memory state before the store. If the
3591 memory state, address, and value of the store are the same as the
3592 last store to this location, then this store will produce the
3593 same memory state as that store.
3595 In this case the vdef versions for this store are value numbered to those
3596 vuse versions, since they represent the same memory state after
3597 this store.
3599 Otherwise, the vdefs for the store are used when inserting into
3600 the table, since the store generates a new memory state. */
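/* For example (informal sketch):

     a.x = t_1;   VUSE .MEM_2, VDEF .MEM_3
     a.x = t_1;   VUSE .MEM_3, VDEF .MEM_4

   The second store changes nothing, so .MEM_4 is value numbered to
   .MEM_3 instead of representing a new memory state.  */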
3602 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3604 if (result)
3606 if (TREE_CODE (result) == SSA_NAME)
3607 result = SSA_VAL (result);
3608 resultsame = expressions_equal_p (result, op);
3611 if ((!result || !resultsame)
3612 /* Only perform the following when being called from PRE
3613 which embeds tail merging. */
3614 && default_vn_walk_kind == VN_WALK)
3616 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3617 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3618 if (vnresult)
3620 VN_INFO (vdef)->use_processed = true;
3621 return set_ssa_val_to (vdef, vnresult->result_vdef);
3625 if (!result || !resultsame)
3627 if (dump_file && (dump_flags & TDF_DETAILS))
3629 fprintf (dump_file, "No store match\n");
3630 fprintf (dump_file, "Value numbering store ");
3631 print_generic_expr (dump_file, lhs, 0);
3632 fprintf (dump_file, " to ");
3633 print_generic_expr (dump_file, op, 0);
3634 fprintf (dump_file, "\n");
3636 /* Have to set value numbers before insert, since insert is
3637 going to valueize the references in-place. */
3638 if (vdef)
3640 changed |= set_ssa_val_to (vdef, vdef);
3643 /* Do not insert structure copies into the tables. */
3644 if (is_gimple_min_invariant (op)
3645 || is_gimple_reg (op))
3646 vn_reference_insert (lhs, op, vdef, NULL);
3648 /* Only perform the following when being called from PRE
3649 which embeds tail merging. */
3650 if (default_vn_walk_kind == VN_WALK)
3652 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3653 vn_reference_insert (assign, lhs, vuse, vdef);
3656 else
3658 /* We had a match, so value number the vdef to have the value
3659 number of the vuse it came from. */
3661 if (dump_file && (dump_flags & TDF_DETAILS))
3662 fprintf (dump_file, "Store matched earlier value,"
3663 "value numbering store vdefs to matching vuses.\n");
3665 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3668 return changed;
3671 /* Visit and value number PHI, return true if the value number
3672 changed. */
3674 static bool
3675 visit_phi (gimple *phi)
3677 bool changed = false;
3678 tree result;
3679 tree sameval = VN_TOP;
3680 bool allsame = true;
3681 unsigned n_executable = 0;
3683 /* TODO: We could check for this in init_sccvn, and replace this
3684 with a gcc_assert. */
3685 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3686 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3688 /* See if all non-TOP arguments have the same value. TOP is
3689 equivalent to everything, so we can ignore it. */
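/* For example (informal sketch): a PHI <x_1, VN_TOP, x_1> is treated
   as having the single value of x_1.  */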
3690 edge_iterator ei;
3691 edge e;
3692 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3693 if (e->flags & EDGE_EXECUTABLE)
3695 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3697 ++n_executable;
3698 if (TREE_CODE (def) == SSA_NAME)
3699 def = SSA_VAL (def);
3700 if (def == VN_TOP)
3701 continue;
3702 if (sameval == VN_TOP)
3703 sameval = def;
3704 else if (!expressions_equal_p (def, sameval))
3706 allsame = false;
3707 break;
3711 /* If none of the edges was executable or all incoming values are
3712 undefined, keep the value-number at VN_TOP. If only a single edge
3713 is executable, use its value. */
3714 if (sameval == VN_TOP
3715 || n_executable == 1)
3716 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3718 /* First see if it is equivalent to a phi node in this block. We prefer
3719 this as it allows IV elimination - see PRs 66502 and 67167. */
3720 result = vn_phi_lookup (phi);
3721 if (result)
3722 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3723 /* Otherwise, if all arguments value numbered to the same value, the phi
3724 node has that value. */
3725 else if (allsame)
3726 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3727 else
3729 vn_phi_insert (phi, PHI_RESULT (phi));
3730 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3733 return changed;
3736 /* Try to simplify RHS using equivalences and constant folding. */
3738 static tree
3739 try_to_simplify (gassign *stmt)
3741 enum tree_code code = gimple_assign_rhs_code (stmt);
3742 tree tem;
3744 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3745 in this case; there is no point in doing extra work. */
3746 if (code == SSA_NAME)
3747 return NULL_TREE;
3749 /* First try constant folding based on our current lattice. */
3750 mprts_hook = vn_lookup_simplify_result;
3751 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3752 mprts_hook = NULL;
3753 if (tem
3754 && (TREE_CODE (tem) == SSA_NAME
3755 || is_gimple_min_invariant (tem)))
3756 return tem;
3758 return NULL_TREE;
3761 /* Visit and value number USE, return true if the value number
3762 changed. */
3764 static bool
3765 visit_use (tree use)
3767 bool changed = false;
3768 gimple *stmt = SSA_NAME_DEF_STMT (use);
3770 mark_use_processed (use);
3772 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3773 if (dump_file && (dump_flags & TDF_DETAILS)
3774 && !SSA_NAME_IS_DEFAULT_DEF (use))
3776 fprintf (dump_file, "Value numbering ");
3777 print_generic_expr (dump_file, use, 0);
3778 fprintf (dump_file, " stmt = ");
3779 print_gimple_stmt (dump_file, stmt, 0, 0);
3782 /* Handle uninitialized uses. */
3783 if (SSA_NAME_IS_DEFAULT_DEF (use))
3784 changed = set_ssa_val_to (use, use);
3785 else if (gimple_code (stmt) == GIMPLE_PHI)
3786 changed = visit_phi (stmt);
3787 else if (gimple_has_volatile_ops (stmt))
3788 changed = defs_to_varying (stmt);
3789 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3791 enum tree_code code = gimple_assign_rhs_code (ass);
3792 tree lhs = gimple_assign_lhs (ass);
3793 tree rhs1 = gimple_assign_rhs1 (ass);
3794 tree simplified;
3796 /* Shortcut for copies. Simplifying copies is pointless,
3797 since we copy the expression and value they represent. */
3798 if (code == SSA_NAME
3799 && TREE_CODE (lhs) == SSA_NAME)
3801 changed = visit_copy (lhs, rhs1);
3802 goto done;
3804 simplified = try_to_simplify (ass);
3805 if (simplified)
3807 if (dump_file && (dump_flags & TDF_DETAILS))
3809 fprintf (dump_file, "RHS ");
3810 print_gimple_expr (dump_file, ass, 0, 0);
3811 fprintf (dump_file, " simplified to ");
3812 print_generic_expr (dump_file, simplified, 0);
3813 fprintf (dump_file, "\n");
3816 /* Setting value numbers to constants will occasionally
3817 screw up phi congruence because constants are not
3818 uniquely associated with a single ssa name that can be
3819 looked up. */
3820 if (simplified
3821 && is_gimple_min_invariant (simplified)
3822 && TREE_CODE (lhs) == SSA_NAME)
3824 changed = set_ssa_val_to (lhs, simplified);
3825 goto done;
3827 else if (simplified
3828 && TREE_CODE (simplified) == SSA_NAME
3829 && TREE_CODE (lhs) == SSA_NAME)
3831 changed = visit_copy (lhs, simplified);
3832 goto done;
3835 if ((TREE_CODE (lhs) == SSA_NAME
3836 /* We can substitute SSA_NAMEs that are live over
3837 abnormal edges with their constant value. */
3838 && !(gimple_assign_copy_p (ass)
3839 && is_gimple_min_invariant (rhs1))
3840 && !(simplified
3841 && is_gimple_min_invariant (simplified))
3842 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3843 /* Stores or copies from SSA_NAMEs that are live over
3844 abnormal edges are a problem. */
3845 || (code == SSA_NAME
3846 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3847 changed = defs_to_varying (ass);
3848 else if (REFERENCE_CLASS_P (lhs)
3849 || DECL_P (lhs))
3850 changed = visit_reference_op_store (lhs, rhs1, ass);
3851 else if (TREE_CODE (lhs) == SSA_NAME)
3853 if ((gimple_assign_copy_p (ass)
3854 && is_gimple_min_invariant (rhs1))
3855 || (simplified
3856 && is_gimple_min_invariant (simplified)))
3858 if (simplified)
3859 changed = set_ssa_val_to (lhs, simplified);
3860 else
3861 changed = set_ssa_val_to (lhs, rhs1);
3863 else
3865 /* Visit the original statement. */
3866 switch (vn_get_stmt_kind (ass))
3868 case VN_NARY:
3869 changed = visit_nary_op (lhs, ass);
3870 break;
3871 case VN_REFERENCE:
3872 changed = visit_reference_op_load (lhs, rhs1, ass);
3873 break;
3874 default:
3875 changed = defs_to_varying (ass);
3876 break;
3880 else
3881 changed = defs_to_varying (ass);
3883 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3885 tree lhs = gimple_call_lhs (call_stmt);
3886 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3888 /* Try constant folding based on our current lattice. */
3889 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3890 vn_valueize);
3891 if (simplified)
3893 if (dump_file && (dump_flags & TDF_DETAILS))
3895 fprintf (dump_file, "call ");
3896 print_gimple_expr (dump_file, call_stmt, 0, 0);
3897 fprintf (dump_file, " simplified to ");
3898 print_generic_expr (dump_file, simplified, 0);
3899 fprintf (dump_file, "\n");
3902 /* Setting value numbers to constants will occasionally
3903 screw up phi congruence because constants are not
3904 uniquely associated with a single ssa name that can be
3905 looked up. */
3906 if (simplified
3907 && is_gimple_min_invariant (simplified))
3909 changed = set_ssa_val_to (lhs, simplified);
3910 if (gimple_vdef (call_stmt))
3911 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3912 SSA_VAL (gimple_vuse (call_stmt)));
3913 goto done;
3915 else if (simplified
3916 && TREE_CODE (simplified) == SSA_NAME)
3918 changed = visit_copy (lhs, simplified);
3919 if (gimple_vdef (call_stmt))
3920 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3921 SSA_VAL (gimple_vuse (call_stmt)));
3922 goto done;
3924 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3926 changed = defs_to_varying (call_stmt);
3927 goto done;
3931 /* Pick up flags from a devirtualization target. */
3932 tree fn = gimple_call_fn (stmt);
3933 int extra_fnflags = 0;
3934 if (fn && TREE_CODE (fn) == SSA_NAME)
3936 fn = SSA_VAL (fn);
3937 if (TREE_CODE (fn) == ADDR_EXPR
3938 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
3939 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
3941 if (!gimple_call_internal_p (call_stmt)
3942 && (/* Calls to the same function with the same vuse
3943 and the same operands do not necessarily return the same
3944 value, unless they're pure or const. */
3945 ((gimple_call_flags (call_stmt) | extra_fnflags)
3946 & (ECF_PURE | ECF_CONST))
3947 /* If calls have a vdef, subsequent calls won't have
3948 the same incoming vuse. So, if 2 calls with vdef have the
3949 same vuse, we know they're not subsequent.
3950 We can therefore give 2 calls to the same function with
3951 the same vuse and the same operands the same value
3952 number, because there is no code in the program that
3953 could compare the 2 values... */
3954 || (gimple_vdef (call_stmt)
3955 /* ... unless the call returns a pointer which does
3956 not alias with anything else, in which case the
3957 information that the values are distinct is encoded
3958 in the IL. */
3959 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3960 /* Only perform the following when being called from PRE
3961 which embeds tail merging. */
3962 && default_vn_walk_kind == VN_WALK)))
3963 changed = visit_reference_op_call (lhs, call_stmt);
3964 else
3965 changed = defs_to_varying (call_stmt);
3967 else
3968 changed = defs_to_varying (stmt);
3969 done:
3970 return changed;
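/* Illustrative sketch, not part of the pass (hypothetical names x_1,
   y_3, a_2 and foo): the call handling above means that for

     x_1 = foo (a_2);
     ...
     y_3 = foo (a_2);

   the two calls receive the same value number if foo is ECF_CONST or
   ECF_PURE (given the same vuse).  Two calls that both have a vdef and
   share the same vuse cannot execute one after the other, so when run
   from PRE they may also be given the same value number, since no code
   can compare their two results.  */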
3973 /* Compare two operands by reverse postorder index. */
3975 static int
3976 compare_ops (const void *pa, const void *pb)
3978 const tree opa = *((const tree *)pa);
3979 const tree opb = *((const tree *)pb);
3980 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3981 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3982 basic_block bba;
3983 basic_block bbb;
3985 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3986 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3987 else if (gimple_nop_p (opstmta))
3988 return -1;
3989 else if (gimple_nop_p (opstmtb))
3990 return 1;
3992 bba = gimple_bb (opstmta);
3993 bbb = gimple_bb (opstmtb);
3995 if (!bba && !bbb)
3996 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3997 else if (!bba)
3998 return -1;
3999 else if (!bbb)
4000 return 1;
4002 if (bba == bbb)
4004 if (gimple_code (opstmta) == GIMPLE_PHI
4005 && gimple_code (opstmtb) == GIMPLE_PHI)
4006 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4007 else if (gimple_code (opstmta) == GIMPLE_PHI)
4008 return -1;
4009 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4010 return 1;
4011 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4012 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4013 else
4014 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4016 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4019 /* Sort an array containing members of a strongly connected component
4020 SCC so that the members are ordered by RPO number.
4021 This means that when the sort is complete, iterating through the
4022 array will give you the members in RPO order. */
4024 static void
4025 sort_scc (vec<tree> scc)
4027 scc.qsort (compare_ops);
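/* Illustrative sketch (hypothetical SSA names and blocks): assuming
   rpo_numbers[bb3] < rpo_numbers[bb5], an SCC collected as

     { t_7 (stmt in bb5), i_4 (PHI in bb3), j_6 (stmt in bb3) }

   sorts to [ i_4, j_6, t_7 ]: names defined in earlier RPO blocks come
   first, PHIs precede other statements of the same block, and statement
   uids resp. SSA versions break the remaining ties, as implemented by
   compare_ops above.  */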
4030 /* Insert the no longer used nary ONARY into the hash tables INFO. */
4032 static void
4033 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4035 size_t size = sizeof_vn_nary_op (onary->length);
4036 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4037 &info->nary_obstack);
4038 memcpy (nary, onary, size);
4039 vn_nary_op_insert_into (nary, info->nary, false);
4042 /* Insert the no longer used phi OPHI into the hash tables INFO. */
4044 static void
4045 copy_phi (vn_phi_t ophi, vn_tables_t info)
4047 vn_phi_t phi = info->phis_pool->allocate ();
4048 vn_phi_s **slot;
4049 memcpy (phi, ophi, sizeof (*phi));
4050 ophi->phiargs.create (0);
4051 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4052 gcc_assert (!*slot);
4053 *slot = phi;
4056 /* Insert the no longer used reference OREF into the hash tables INFO. */
4058 static void
4059 copy_reference (vn_reference_t oref, vn_tables_t info)
4061 vn_reference_t ref;
4062 vn_reference_s **slot;
4063 ref = info->references_pool->allocate ();
4064 memcpy (ref, oref, sizeof (*ref));
4065 oref->operands.create (0);
4066 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4067 if (*slot)
4068 free_reference (*slot);
4069 *slot = ref;
4072 /* Process a strongly connected component in the SSA graph. */
4074 static void
4075 process_scc (vec<tree> scc)
4077 tree var;
4078 unsigned int i;
4079 unsigned int iterations = 0;
4080 bool changed = true;
4081 vn_nary_op_iterator_type hin;
4082 vn_phi_iterator_type hip;
4083 vn_reference_iterator_type hir;
4084 vn_nary_op_t nary;
4085 vn_phi_t phi;
4086 vn_reference_t ref;
4088 /* If the SCC has a single member, just visit it. */
4089 if (scc.length () == 1)
4091 tree use = scc[0];
4092 if (VN_INFO (use)->use_processed)
4093 return;
4094 /* We need to make sure it doesn't form a cycle itself, which can
4095 happen for self-referential PHI nodes. In that case we would
4096 end up inserting an expression with VN_TOP operands into the
4097 valid table which makes us derive bogus equivalences later.
4098 The cheapest way to check this is to assume it for all PHI nodes. */
4099 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4100 /* Fallthru to iteration. */ ;
4101 else
4103 visit_use (use);
4104 return;
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4109 print_scc (dump_file, scc);
4111 /* Iterate over the SCC with the optimistic table until it stops
4112 changing. */
4113 current_info = optimistic_info;
4114 while (changed)
4116 changed = false;
4117 iterations++;
4118 if (dump_file && (dump_flags & TDF_DETAILS))
4119 fprintf (dump_file, "Starting iteration %d\n", iterations);
4120 /* As we are value-numbering optimistically we have to
4121 clear the expression tables and the simplified expressions
4122 in each iteration until we converge. */
4123 optimistic_info->nary->empty ();
4124 optimistic_info->phis->empty ();
4125 optimistic_info->references->empty ();
4126 obstack_free (&optimistic_info->nary_obstack, NULL);
4127 gcc_obstack_init (&optimistic_info->nary_obstack);
4128 optimistic_info->phis_pool->release ();
4129 optimistic_info->references_pool->release ();
4130 FOR_EACH_VEC_ELT (scc, i, var)
4131 gcc_assert (!VN_INFO (var)->needs_insertion
4132 && VN_INFO (var)->expr == NULL);
4133 FOR_EACH_VEC_ELT (scc, i, var)
4134 changed |= visit_use (var);
4137 if (dump_file && (dump_flags & TDF_DETAILS))
4138 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4139 statistics_histogram_event (cfun, "SCC iterations", iterations);
4141 /* Finally, copy the contents of the no longer used optimistic
4142 table to the valid table. */
4143 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4144 copy_nary (nary, valid_info);
4145 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4146 copy_phi (phi, valid_info);
4147 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4148 ref, vn_reference_t, hir)
4149 copy_reference (ref, valid_info);
4151 current_info = valid_info;
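/* Illustrative sketch of the optimistic iteration (hypothetical IL):
   for the cycle

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first iteration still sees i_2 as VN_TOP, optimistically values
   the PHI result i_1 to 0 and then i_2 = i_1 + 0 to 0 as well; the
   second iteration, run with freshly cleared optimistic tables,
   computes the same values, so the SCC converges and both names end up
   with value number 0.  */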
4155 /* Pop the components of the found SCC for NAME off the SCC stack
4156 and process them. Returns true if all went well, false if
4157 we ran into resource limits. */
4159 static bool
4160 extract_and_process_scc_for_name (tree name)
4162 auto_vec<tree> scc;
4163 tree x;
4165 /* Found an SCC, pop the components off the SCC stack and
4166 process them. */
4169 x = sccstack.pop ();
4171 VN_INFO (x)->on_sccstack = false;
4172 scc.safe_push (x);
4173 } while (x != name);
4175 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4176 if (scc.length ()
4177 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4179 if (dump_file)
4180 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4181 "SCC size %u exceeding %u\n", scc.length (),
4182 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4184 return false;
4187 if (scc.length () > 1)
4188 sort_scc (scc);
4190 process_scc (scc);
4192 return true;
4195 /* Depth-first search on NAME to discover and process SCCs in the SSA
4196 graph.
4197 Execution of this algorithm relies on the fact that the SCCs are
4198 popped off the stack in topological order.
4199 Returns true if successful, false if we stopped processing SCCs due
4200 to resource constraints. */
4202 static bool
4203 DFS (tree name)
4205 auto_vec<ssa_op_iter> itervec;
4206 auto_vec<tree> namevec;
4207 use_operand_p usep = NULL;
4208 gimple *defstmt;
4209 tree use;
4210 ssa_op_iter iter;
4212 start_over:
4213 /* SCC info */
4214 VN_INFO (name)->dfsnum = next_dfs_num++;
4215 VN_INFO (name)->visited = true;
4216 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4218 sccstack.safe_push (name);
4219 VN_INFO (name)->on_sccstack = true;
4220 defstmt = SSA_NAME_DEF_STMT (name);
4222 /* Recursively DFS on our operands, looking for SCC's. */
4223 if (!gimple_nop_p (defstmt))
4225 /* Push a new iterator. */
4226 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4227 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4228 else
4229 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4231 else
4232 clear_and_done_ssa_iter (&iter);
4234 while (1)
4236 /* If we are done processing uses of a name, go up the stack
4237 of iterators and process SCCs as we found them. */
4238 if (op_iter_done (&iter))
4240 /* See if we found an SCC. */
4241 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4242 if (!extract_and_process_scc_for_name (name))
4243 return false;
4245 /* Check if we are done. */
4246 if (namevec.is_empty ())
4247 return true;
4249 /* Restore the last use walker and continue walking there. */
4250 use = name;
4251 name = namevec.pop ();
4252 memcpy (&iter, &itervec.last (),
4253 sizeof (ssa_op_iter));
4254 itervec.pop ();
4255 goto continue_walking;
4258 use = USE_FROM_PTR (usep);
4260 /* Since we handle phi nodes, we will sometimes get
4261 invariants in the use expression. */
4262 if (TREE_CODE (use) == SSA_NAME)
4264 if (! (VN_INFO (use)->visited))
4266 /* Recurse by pushing the current use walking state on
4267 the stack and starting over. */
4268 itervec.safe_push (iter);
4269 namevec.safe_push (name);
4270 name = use;
4271 goto start_over;
4273 continue_walking:
4274 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4275 VN_INFO (use)->low);
4277 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4278 && VN_INFO (use)->on_sccstack)
4280 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4281 VN_INFO (name)->low);
4285 usep = op_iter_next_use (&iter);
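/* Illustrative sketch (hypothetical use edge): the explicit namevec and
   itervec stacks above make the recursion iterative, but the low-link
   updates are the classic Tarjan ones:

     low (name) = MIN (low (name), low (use));
     if (dfsnum (use) < dfsnum (name) && on_sccstack (use))
       low (name) = MIN (low (name), dfsnum (use));

   a name whose low still equals its own dfsnum once all of its uses are
   processed is the root of an SCC and triggers
   extract_and_process_scc_for_name.  */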
4289 /* Allocate a value number table. */
4291 static void
4292 allocate_vn_table (vn_tables_t table)
4294 table->phis = new vn_phi_table_type (23);
4295 table->nary = new vn_nary_op_table_type (23);
4296 table->references = new vn_reference_table_type (23);
4298 gcc_obstack_init (&table->nary_obstack);
4299 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4300 table->references_pool = new object_allocator<vn_reference_s>
4301 ("VN references");
4304 /* Free a value number table. */
4306 static void
4307 free_vn_table (vn_tables_t table)
4309 delete table->phis;
4310 table->phis = NULL;
4311 delete table->nary;
4312 table->nary = NULL;
4313 delete table->references;
4314 table->references = NULL;
4315 obstack_free (&table->nary_obstack, NULL);
4316 delete table->phis_pool;
4317 delete table->references_pool;
4320 static void
4321 init_scc_vn (void)
4323 int j;
4324 int *rpo_numbers_temp;
4326 calculate_dominance_info (CDI_DOMINATORS);
4327 mark_dfs_back_edges ();
4329 sccstack.create (0);
4330 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4332 constant_value_ids = BITMAP_ALLOC (NULL);
4334 next_dfs_num = 1;
4335 next_value_id = 1;
4337 vn_ssa_aux_table.create (num_ssa_names + 1);
4338 /* VEC_alloc doesn't actually grow it to the right size; it just
4339 preallocates the space needed to do so. */
4340 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4341 gcc_obstack_init (&vn_ssa_aux_obstack);
4343 shared_lookup_phiargs.create (0);
4344 shared_lookup_references.create (0);
4345 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4346 rpo_numbers_temp =
4347 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4348 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4350 /* RPO_NUMBERS_TEMP is the RPO ordering itself: rpo[i] = bb means that
4351 bb is the i'th block in RPO order. We want to map block indices to
4352 RPO numbers instead, so we need to invert this array. */
4353 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4354 rpo_numbers[rpo_numbers_temp[j]] = j;
4356 XDELETE (rpo_numbers_temp);
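/* Illustrative sketch (hypothetical block indices): if

     rpo_numbers_temp[] = { 4, 2, 3 }

   then the loop above produces rpo_numbers[4] = 0, rpo_numbers[2] = 1
   and rpo_numbers[3] = 2, i.e. a map from block index to position in
   the RPO sequence.  */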
4358 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4360 renumber_gimple_stmt_uids ();
4362 /* Create the valid and optimistic value numbering tables. */
4363 valid_info = XCNEW (struct vn_tables_s);
4364 allocate_vn_table (valid_info);
4365 optimistic_info = XCNEW (struct vn_tables_s);
4366 allocate_vn_table (optimistic_info);
4367 current_info = valid_info;
4369 /* Create the VN_INFO structures, and initialize value numbers to
4370 TOP or VARYING for parameters. */
4371 size_t i;
4372 tree name;
4374 FOR_EACH_SSA_NAME (i, name, cfun)
4376 VN_INFO_GET (name)->valnum = VN_TOP;
4377 VN_INFO (name)->needs_insertion = false;
4378 VN_INFO (name)->expr = NULL;
4379 VN_INFO (name)->value_id = 0;
4381 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4382 continue;
4384 switch (TREE_CODE (SSA_NAME_VAR (name)))
4386 case VAR_DECL:
4387 /* Undefined vars keep TOP. */
4388 break;
4390 case PARM_DECL:
4391 /* Parameters are VARYING but we can record a condition
4392 if we know it is a non-NULL pointer. */
4393 VN_INFO (name)->visited = true;
4394 VN_INFO (name)->valnum = name;
4395 if (POINTER_TYPE_P (TREE_TYPE (name))
4396 && nonnull_arg_p (SSA_NAME_VAR (name)))
4398 tree ops[2];
4399 ops[0] = name;
4400 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4401 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4402 boolean_true_node, 0);
4403 if (dump_file && (dump_flags & TDF_DETAILS))
4405 fprintf (dump_file, "Recording ");
4406 print_generic_expr (dump_file, name, TDF_SLIM);
4407 fprintf (dump_file, " != 0\n");
4410 break;
4412 case RESULT_DECL:
4413 /* If the result is passed by invisible reference the default
4414 def is initialized, otherwise it's uninitialized. */
4415 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4417 VN_INFO (name)->visited = true;
4418 VN_INFO (name)->valnum = name;
4420 break;
4422 default:
4423 gcc_unreachable ();
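/* Illustrative sketch (hypothetical parameter): for

     void f (int *p) __attribute__((nonnull));

   the PARM_DECL case above records the nary "p_1(D) != 0 == true", so
   later lookups of comparisons of the default definition of p against
   NULL fold to a constant.  */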
4428 /* Restore SSA info that has been reset on value leaders. */
4430 void
4431 scc_vn_restore_ssa_info (void)
4433 unsigned i;
4434 tree name;
4436 FOR_EACH_SSA_NAME (i, name, cfun)
4438 if (has_VN_INFO (name))
4440 if (VN_INFO (name)->needs_insertion)
4442 else if (POINTER_TYPE_P (TREE_TYPE (name))
4443 && VN_INFO (name)->info.ptr_info)
4444 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4445 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4446 && VN_INFO (name)->info.range_info)
4448 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4449 SSA_NAME_ANTI_RANGE_P (name)
4450 = VN_INFO (name)->range_info_anti_range_p;
4456 void
4457 free_scc_vn (void)
4459 size_t i;
4460 tree name;
4462 delete constant_to_value_id;
4463 constant_to_value_id = NULL;
4464 BITMAP_FREE (constant_value_ids);
4465 shared_lookup_phiargs.release ();
4466 shared_lookup_references.release ();
4467 XDELETEVEC (rpo_numbers);
4469 FOR_EACH_SSA_NAME (i, name, cfun)
4471 if (has_VN_INFO (name)
4472 && VN_INFO (name)->needs_insertion)
4473 release_ssa_name (name);
4475 obstack_free (&vn_ssa_aux_obstack, NULL);
4476 vn_ssa_aux_table.release ();
4478 sccstack.release ();
4479 free_vn_table (valid_info);
4480 XDELETE (valid_info);
4481 free_vn_table (optimistic_info);
4482 XDELETE (optimistic_info);
4484 BITMAP_FREE (const_parms);
4487 /* Set *ID according to RESULT. */
4489 static void
4490 set_value_id_for_result (tree result, unsigned int *id)
4492 if (result && TREE_CODE (result) == SSA_NAME)
4493 *id = VN_INFO (result)->value_id;
4494 else if (result && is_gimple_min_invariant (result))
4495 *id = get_or_alloc_constant_value_id (result);
4496 else
4497 *id = get_next_value_id ();
4500 /* Set the value ids in the valid hash tables. */
4502 static void
4503 set_hashtable_value_ids (void)
4505 vn_nary_op_iterator_type hin;
4506 vn_phi_iterator_type hip;
4507 vn_reference_iterator_type hir;
4508 vn_nary_op_t vno;
4509 vn_reference_t vr;
4510 vn_phi_t vp;
4512 /* Now set the value ids of the things we had put in the hash
4513 table. */
4515 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4516 set_value_id_for_result (vno->result, &vno->value_id);
4518 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4519 set_value_id_for_result (vp->result, &vp->value_id);
4521 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4522 hir)
4523 set_value_id_for_result (vr->result, &vr->value_id);
4526 class sccvn_dom_walker : public dom_walker
4528 public:
4529 sccvn_dom_walker ()
4530 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4532 virtual edge before_dom_children (basic_block);
4533 virtual void after_dom_children (basic_block);
4535 void record_cond (basic_block,
4536 enum tree_code code, tree lhs, tree rhs, bool value);
4537 void record_conds (basic_block,
4538 enum tree_code code, tree lhs, tree rhs, bool value);
4540 bool fail;
4541 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4542 cond_stack;
4545 /* Record a temporary condition for the BB and its dominated blocks. */
4547 void
4548 sccvn_dom_walker::record_cond (basic_block bb,
4549 enum tree_code code, tree lhs, tree rhs,
4550 bool value)
4552 tree ops[2] = { lhs, rhs };
4553 vn_nary_op_t old = NULL;
4554 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4555 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4556 vn_nary_op_t cond
4557 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4558 value
4559 ? boolean_true_node
4560 : boolean_false_node, 0);
4561 if (dump_file && (dump_flags & TDF_DETAILS))
4563 fprintf (dump_file, "Recording temporarily ");
4564 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4565 fprintf (dump_file, " %s ", get_tree_code_name (code));
4566 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4567 fprintf (dump_file, " == %s%s\n",
4568 value ? "true" : "false",
4569 old ? " (old entry saved)" : "");
4571 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4574 /* Record temporary conditions for the BB and its dominated blocks
4575 according to LHS CODE RHS == VALUE, including the conditions it implies. */
4577 void
4578 sccvn_dom_walker::record_conds (basic_block bb,
4579 enum tree_code code, tree lhs, tree rhs,
4580 bool value)
4582 /* Record the original condition. */
4583 record_cond (bb, code, lhs, rhs, value);
4585 if (!value)
4586 return;
4588 /* Record dominated conditions if the condition is true. Note that
4589 the inversion is already recorded. */
4590 switch (code)
4592 case LT_EXPR:
4593 case GT_EXPR:
4594 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4595 record_cond (bb, NE_EXPR, lhs, rhs, true);
4596 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4597 break;
4599 case EQ_EXPR:
4600 record_cond (bb, LE_EXPR, lhs, rhs, true);
4601 record_cond (bb, GE_EXPR, lhs, rhs, true);
4602 record_cond (bb, LT_EXPR, lhs, rhs, false);
4603 record_cond (bb, GT_EXPR, lhs, rhs, false);
4604 break;
4606 default:
4607 break;
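/* Illustrative sketch (hypothetical operands): for a taken edge with
   condition a_1 < b_2, the calls above record

     a_1 <  b_2 == true
     a_1 <= b_2 == true
     a_1 != b_2 == true
     a_1 == b_2 == false

   and the caller additionally records the inverted comparison
   a_1 >= b_2 == false, so lookups of any of these forms succeed while
   walking the dominated blocks.  */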
4611 /* Restore expressions and values derived from conditionals. */
4613 void
4614 sccvn_dom_walker::after_dom_children (basic_block bb)
4616 while (!cond_stack.is_empty ()
4617 && cond_stack.last ().first == bb)
4619 vn_nary_op_t cond = cond_stack.last ().second.first;
4620 vn_nary_op_t old = cond_stack.last ().second.second;
4621 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4622 if (old)
4623 vn_nary_op_insert_into (old, current_info->nary, false);
4624 cond_stack.pop ();
4628 /* Value number all statements in BB. */
4630 edge
4631 sccvn_dom_walker::before_dom_children (basic_block bb)
4633 edge e;
4634 edge_iterator ei;
4636 if (fail)
4637 return NULL;
4639 if (dump_file && (dump_flags & TDF_DETAILS))
4640 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4642 /* If we have a single predecessor, record the equivalence from a
4643 possible condition on the predecessor edge. */
4644 edge pred_e = NULL;
4645 FOR_EACH_EDGE (e, ei, bb->preds)
4647 /* Ignore simple backedges from this block so that we can record
4648 conditions in loop headers. */
4649 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4650 continue;
4651 if (! pred_e)
4652 pred_e = e;
4653 else
4655 pred_e = NULL;
4656 break;
4659 if (pred_e)
4661 /* Check if there are multiple executable successor edges in
4662 the source block. Otherwise there is no additional info
4663 to be recorded. */
4664 edge e2;
4665 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4666 if (e2 != pred_e
4667 && e2->flags & EDGE_EXECUTABLE)
4668 break;
4669 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4671 gimple *stmt = last_stmt (pred_e->src);
4672 if (stmt
4673 && gimple_code (stmt) == GIMPLE_COND)
4675 enum tree_code code = gimple_cond_code (stmt);
4676 tree lhs = gimple_cond_lhs (stmt);
4677 tree rhs = gimple_cond_rhs (stmt);
4678 record_conds (bb, code, lhs, rhs,
4679 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4680 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4681 if (code != ERROR_MARK)
4682 record_conds (bb, code, lhs, rhs,
4683 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4688 /* Value-number all defs in the basic-block. */
4689 for (gphi_iterator gsi = gsi_start_phis (bb);
4690 !gsi_end_p (gsi); gsi_next (&gsi))
4692 gphi *phi = gsi.phi ();
4693 tree res = PHI_RESULT (phi);
4694 if (!VN_INFO (res)->visited
4695 && !DFS (res))
4697 fail = true;
4698 return NULL;
4701 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4702 !gsi_end_p (gsi); gsi_next (&gsi))
4704 ssa_op_iter i;
4705 tree op;
4706 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4707 if (!VN_INFO (op)->visited
4708 && !DFS (op))
4710 fail = true;
4711 return NULL;
4715 /* Finally look at the last stmt. */
4716 gimple *stmt = last_stmt (bb);
4717 if (!stmt)
4718 return NULL;
4720 enum gimple_code code = gimple_code (stmt);
4721 if (code != GIMPLE_COND
4722 && code != GIMPLE_SWITCH
4723 && code != GIMPLE_GOTO)
4724 return NULL;
4726 if (dump_file && (dump_flags & TDF_DETAILS))
4728 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4729 print_gimple_stmt (dump_file, stmt, 0, 0);
4732 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4733 if value-numbering can prove they are not reachable. Handling
4734 computed gotos is also possible. */
4735 tree val;
4736 switch (code)
4738 case GIMPLE_COND:
4740 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4741 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4742 val = gimple_simplify (gimple_cond_code (stmt),
4743 boolean_type_node, lhs, rhs,
4744 NULL, vn_valueize);
4745 /* If that didn't simplify to a constant, see if we have recorded
4746 temporary expressions from taken edges. */
4747 if (!val || TREE_CODE (val) != INTEGER_CST)
4749 tree ops[2];
4750 ops[0] = lhs;
4751 ops[1] = rhs;
4752 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4753 boolean_type_node, ops, NULL);
4755 break;
4757 case GIMPLE_SWITCH:
4758 val = gimple_switch_index (as_a <gswitch *> (stmt));
4759 break;
4760 case GIMPLE_GOTO:
4761 val = gimple_goto_dest (stmt);
4762 break;
4763 default:
4764 gcc_unreachable ();
4766 if (!val)
4767 return NULL;
4769 edge taken = find_taken_edge (bb, vn_valueize (val));
4770 if (!taken)
4771 return NULL;
4773 if (dump_file && (dump_flags & TDF_DETAILS))
4774 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4775 "not executable\n", bb->index, bb->index, taken->dest->index);
4777 return taken;
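/* Illustrative sketch (hypothetical IL): if the final

     if (a_1 < b_2)

   of a block simplifies to true, either through gimple_simplify on the
   valueized operands or through a condition recorded above,
   find_taken_edge returns the true edge; the dominator walk then marks
   the other outgoing edges as not executable, so blocks reachable only
   through them are not value-numbered as executable code.  */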
4780 /* Do SCCVN. Returns true if it finished, false if we bailed out
4781 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4782 how the alias-oracle walk is used during the VN process. */
4784 bool
4785 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4787 size_t i;
4789 default_vn_walk_kind = default_vn_walk_kind_;
4791 init_scc_vn ();
4793 /* Collect pointers we know point to readonly memory. */
4794 const_parms = BITMAP_ALLOC (NULL);
4795 tree fnspec = lookup_attribute ("fn spec",
4796 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4797 if (fnspec)
4799 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4800 i = 1;
4801 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4802 arg; arg = DECL_CHAIN (arg), ++i)
4804 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4805 break;
4806 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4807 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4809 tree name = ssa_default_def (cfun, arg);
4810 if (name)
4811 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4816 /* Walk all blocks in dominator order, value-numbering stmts
4817 SSA defs and decide whether outgoing edges are not executable. */
4818 sccvn_dom_walker walker;
4819 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4820 if (walker.fail)
4822 free_scc_vn ();
4823 return false;
4826 /* Initialize the value ids and prune out remaining VN_TOPs
4827 from dead code. */
4828 tree name;
4830 FOR_EACH_SSA_NAME (i, name, cfun)
4832 vn_ssa_aux_t info = VN_INFO (name);
4833 if (!info->visited)
4834 info->valnum = name;
4835 if (info->valnum == name
4836 || info->valnum == VN_TOP)
4837 info->value_id = get_next_value_id ();
4838 else if (is_gimple_min_invariant (info->valnum))
4839 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4842 /* Propagate. */
4843 FOR_EACH_SSA_NAME (i, name, cfun)
4845 vn_ssa_aux_t info = VN_INFO (name);
4846 if (TREE_CODE (info->valnum) == SSA_NAME
4847 && info->valnum != name
4848 && info->value_id != VN_INFO (info->valnum)->value_id)
4849 info->value_id = VN_INFO (info->valnum)->value_id;
4852 set_hashtable_value_ids ();
4854 if (dump_file && (dump_flags & TDF_DETAILS))
4856 fprintf (dump_file, "Value numbers:\n");
4857 FOR_EACH_SSA_NAME (i, name, cfun)
4859 if (VN_INFO (name)->visited
4860 && SSA_VAL (name) != name)
4862 print_generic_expr (dump_file, name, 0);
4863 fprintf (dump_file, " = ");
4864 print_generic_expr (dump_file, SSA_VAL (name), 0);
4865 fprintf (dump_file, "\n");
4870 return true;
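/* Illustrative sketch (hypothetical names): after the walk above,
   value ids are assigned so that e.g.

     valnum (b_2) = b_2   ->  b_2 gets a fresh value id
     valnum (c_7) = 42    ->  c_7 gets the constant value id of 42
     valnum (a_5) = b_2   ->  the propagation loop copies b_2's value id

   leaving every name that values to the same leader with the same
   value id.  */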
4873 /* Return the maximum value id we have ever seen. */
4875 unsigned int
4876 get_max_value_id (void)
4878 return next_value_id;
4881 /* Return the next unique value id. */
4883 unsigned int
4884 get_next_value_id (void)
4886 return next_value_id++;
4890 /* Compare two expressions E1 and E2 and return true if they are equal. */
4892 bool
4893 expressions_equal_p (tree e1, tree e2)
4895 /* The obvious case. */
4896 if (e1 == e2)
4897 return true;
4899 /* If either one is VN_TOP consider them equal. */
4900 if (e1 == VN_TOP || e2 == VN_TOP)
4901 return true;
4903 /* If only one of them is null, they cannot be equal. */
4904 if (!e1 || !e2)
4905 return false;
4907 /* Now perform the actual comparison. */
4908 if (TREE_CODE (e1) == TREE_CODE (e2)
4909 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4910 return true;
4912 return false;
4916 /* Return true if the nary operation NARY may trap. This is a copy
4917 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4919 bool
4920 vn_nary_may_trap (vn_nary_op_t nary)
4922 tree type;
4923 tree rhs2 = NULL_TREE;
4924 bool honor_nans = false;
4925 bool honor_snans = false;
4926 bool fp_operation = false;
4927 bool honor_trapv = false;
4928 bool handled, ret;
4929 unsigned i;
4931 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4932 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4933 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4935 type = nary->type;
4936 fp_operation = FLOAT_TYPE_P (type);
4937 if (fp_operation)
4939 honor_nans = flag_trapping_math && !flag_finite_math_only;
4940 honor_snans = flag_signaling_nans != 0;
4942 else if (INTEGRAL_TYPE_P (type)
4943 && TYPE_OVERFLOW_TRAPS (type))
4944 honor_trapv = true;
4946 if (nary->length >= 2)
4947 rhs2 = nary->op[1];
4948 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4949 honor_trapv,
4950 honor_nans, honor_snans, rhs2,
4951 &handled);
4952 if (handled
4953 && ret)
4954 return true;
4956 for (i = 0; i < nary->length; ++i)
4957 if (tree_could_trap_p (nary->op[i]))
4958 return true;
4960 return false;
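/* Illustrative sketch (hypothetical operands):

     x_1 = a_2 / b_3;   division by a possibly zero b_3 -> may trap
     y_4 = c_5 + 1;     wrapping unsigned addition       -> cannot trap

   callers such as PRE use vn_nary_may_trap to avoid inserting an
   expression like the former at a point where it was not necessarily
   executed in the original program.  */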