[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
63 #include "stringpool.h"
64 #include "attribs.h"
66 /* This algorithm is based on the SCC algorithm presented by Keith
67 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
68 (http://citeseer.ist.psu.edu/41805.html). In
69 straight line code, it is equivalent to a regular hash based value
70 numbering that is performed in reverse postorder.
72 For code with cycles, there are two alternatives, both of which
73 require keeping the hashtables separate from the actual list of
74 value numbers for SSA names.
76 1. Iterate value numbering in an RPO walk of the blocks, removing
77 all the entries from the hashtable after each iteration (but
78 keeping the SSA name->value number mapping between iterations).
79 Iterate until it does not change.
81 2. Perform value numbering as part of an SCC walk on the SSA graph,
82 iterating only the cycles in the SSA graph until they do not change
83 (using a separate, optimistic hashtable for value numbering the SCC
84 operands).
86 The second is not just faster in practice (because most SSA graph
87 cycles do not involve all the variables in the graph), it also has
88 some nice properties.
90 One of these nice properties is that when we pop an SCC off the
91 stack, we are guaranteed to have processed all the operands coming from
92 *outside of that SCC*, so we do not need to do anything special to
93 ensure they have value numbers.
95 Another nice property is that the SCC walk is done as part of a DFS
96 of the SSA graph, which makes it easy to perform combining and
97 simplifying operations at the same time.
99 The code below is deliberately written in a way that makes it easy
100 to separate the SCC walk from the other work it does.
102 In order to propagate constants through the code, we track which
103 expressions contain constants, and use those while folding. In
104 theory, we could also track expressions whose value numbers are
105 replaced, in case we end up folding based on expression
106 identities.
108 In order to value number memory, we assign value numbers to vuses.
109 This enables us to note that, for example, stores to the same
110 address of the same value from the same starting memory states are
111 equivalent.
112 TODO:
114 1. We can iterate only the changing portions of the SCC's, but
115 I have not seen an SCC big enough for this to be a win.
116 2. If you differentiate between phi nodes for loops and phi nodes
117 for if-then-else, you can properly consider phi nodes in different
118 blocks for equivalence.
119 3. We could value number vuses in more cases, particularly, whole
120 structure copies.
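/* Illustrative example (added for exposition, not part of the original
   sources): in straight-line GIMPLE such as

       a_1 = x_2 + 1;
       b_3 = x_2 + 1;
       c_4 = b_3;

   hash-based value numbering gives a_1, b_3 and c_4 the same value
   number, so later elimination can reuse a_1.  For a loop PHI such as
   i_5 = PHI <0(preheader), i_7(latch)>, the SSA names in the cycle form
   an SCC that is iterated with the optimistic table until their value
   numbers stop changing.  */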
124 static tree *last_vuse_ptr;
125 static vn_lookup_kind vn_walk_kind;
126 static vn_lookup_kind default_vn_walk_kind;
127 bitmap const_parms;
129 /* vn_nary_op hashtable helpers. */
131 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
133 typedef vn_nary_op_s *compare_type;
134 static inline hashval_t hash (const vn_nary_op_s *);
135 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
138 /* Return the computed hashcode for nary operation P1. */
140 inline hashval_t
141 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
143 return vno1->hashcode;
146 /* Compare nary operations P1 and P2 and return true if they are
147 equivalent. */
149 inline bool
150 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
152 return vn_nary_op_eq (vno1, vno2);
155 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
156 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
159 /* vn_phi hashtable helpers. */
161 static int
162 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
164 struct vn_phi_hasher : pointer_hash <vn_phi_s>
166 static inline hashval_t hash (const vn_phi_s *);
167 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
168 static inline void remove (vn_phi_s *);
171 /* Return the computed hashcode for phi operation P1. */
173 inline hashval_t
174 vn_phi_hasher::hash (const vn_phi_s *vp1)
176 return vp1->hashcode;
179 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
181 inline bool
182 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
184 return vn_phi_eq (vp1, vp2);
187 /* Free a phi operation structure VP. */
189 inline void
190 vn_phi_hasher::remove (vn_phi_s *phi)
192 phi->phiargs.release ();
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
219 /* Free a reference operation structure VP. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : pointer_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 static inline void remove (vn_reference_s *);
237 /* Return the hashcode for a given reference operation P1. */
239 inline hashval_t
240 vn_reference_hasher::hash (const vn_reference_s *vr1)
242 return vr1->hashcode;
245 inline bool
246 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
248 return vn_reference_eq (v, c);
251 inline void
252 vn_reference_hasher::remove (vn_reference_s *v)
254 free_reference (v);
257 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
258 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
261 /* The set of hashtables and alloc_pool's for their items. */
263 typedef struct vn_tables_s
265 vn_nary_op_table_type *nary;
266 vn_phi_table_type *phis;
267 vn_reference_table_type *references;
268 struct obstack nary_obstack;
269 object_allocator<vn_phi_s> *phis_pool;
270 object_allocator<vn_reference_s> *references_pool;
271 } *vn_tables_t;
274 /* vn_constant hashtable helpers. */
276 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
278 static inline hashval_t hash (const vn_constant_s *);
279 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
282 /* Hash table hash function for vn_constant_t. */
284 inline hashval_t
285 vn_constant_hasher::hash (const vn_constant_s *vc1)
287 return vc1->hashcode;
290 /* Hash table equality function for vn_constant_t. */
292 inline bool
293 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
295 if (vc1->hashcode != vc2->hashcode)
296 return false;
298 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
301 static hash_table<vn_constant_hasher> *constant_to_value_id;
302 static bitmap constant_value_ids;
305 /* Valid hashtables storing information we have proven to be
306 correct. */
308 static vn_tables_t valid_info;
310 /* Optimistic hashtables storing information we are making assumptions about
311 during iterations. */
313 static vn_tables_t optimistic_info;
315 /* Pointer to the set of hashtables that is currently being used.
316 Should always point to either the optimistic_info, or the
317 valid_info. */
319 static vn_tables_t current_info;
322 /* Reverse post order index for each basic block. */
324 static int *rpo_numbers;
326 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
328 /* Return the SSA value of the VUSE x, supporting released VDEFs
329 during elimination which will value-number the VDEF to the
330 associated VUSE (but not substitute in the whole lattice). */
332 static inline tree
333 vuse_ssa_val (tree x)
335 if (!x)
336 return NULL_TREE;
338 do
339 {
340 x = SSA_VAL (x);
341 }
342 while (SSA_NAME_IN_FREE_LIST (x));
344 return x;
347 /* This represents the top of the VN lattice, which is the universal
348 value. */
350 tree VN_TOP;
352 /* Unique counter for our value ids. */
354 static unsigned int next_value_id;
356 /* Next DFS number and the stack for strongly connected component
357 detection. */
359 static unsigned int next_dfs_num;
360 static vec<tree> sccstack;
364 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
365 are allocated on an obstack for locality reasons, and to free them
366 without looping over the vec. */
368 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
369 static struct obstack vn_ssa_aux_obstack;
371 /* Return whether there is value numbering information for a given SSA name. */
373 bool
374 has_VN_INFO (tree name)
376 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
377 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
378 return false;
381 /* Return the value numbering information for a given SSA name. */
383 vn_ssa_aux_t
384 VN_INFO (tree name)
386 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
387 gcc_checking_assert (res);
388 return res;
391 /* Set the value numbering info for a given SSA name to a given
392 value. */
394 static inline void
395 VN_INFO_SET (tree name, vn_ssa_aux_t value)
397 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
400 /* Initialize the value numbering info for a given SSA name.
401 This should be called just once for every SSA name. */
403 vn_ssa_aux_t
404 VN_INFO_GET (tree name)
406 vn_ssa_aux_t newinfo;
408 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
409 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
410 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
411 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
412 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
413 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
414 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
415 return newinfo;
419 /* Return the vn_kind the expression computed by the stmt should be
420 associated with. */
422 enum vn_kind
423 vn_get_stmt_kind (gimple *stmt)
425 switch (gimple_code (stmt))
427 case GIMPLE_CALL:
428 return VN_REFERENCE;
429 case GIMPLE_PHI:
430 return VN_PHI;
431 case GIMPLE_ASSIGN:
433 enum tree_code code = gimple_assign_rhs_code (stmt);
434 tree rhs1 = gimple_assign_rhs1 (stmt);
435 switch (get_gimple_rhs_class (code))
437 case GIMPLE_UNARY_RHS:
438 case GIMPLE_BINARY_RHS:
439 case GIMPLE_TERNARY_RHS:
440 return VN_NARY;
441 case GIMPLE_SINGLE_RHS:
442 switch (TREE_CODE_CLASS (code))
444 case tcc_reference:
445 /* VOP-less references can go through unary case. */
446 if ((code == REALPART_EXPR
447 || code == IMAGPART_EXPR
448 || code == VIEW_CONVERT_EXPR
449 || code == BIT_FIELD_REF)
450 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
451 return VN_NARY;
453 /* Fallthrough. */
454 case tcc_declaration:
455 return VN_REFERENCE;
457 case tcc_constant:
458 return VN_CONSTANT;
460 default:
461 if (code == ADDR_EXPR)
462 return (is_gimple_min_invariant (rhs1)
463 ? VN_CONSTANT : VN_REFERENCE);
464 else if (code == CONSTRUCTOR)
465 return VN_NARY;
466 return VN_NONE;
468 default:
469 return VN_NONE;
472 default:
473 return VN_NONE;
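/* Illustrative examples (added for exposition) of the classification
   performed above:

       _1 = x_2 + 3;            -> VN_NARY
       _1 = MEM[p_2];           -> VN_REFERENCE
       x_1 = PHI <a_2, b_3>;    -> VN_PHI
       _1 = foo (a_2);          -> VN_REFERENCE
       _1 = 42;                 -> VN_CONSTANT  */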
477 /* Lookup a value id for CONSTANT and return it. If it does not
478 exist returns 0. */
480 unsigned int
481 get_constant_value_id (tree constant)
483 vn_constant_s **slot;
484 struct vn_constant_s vc;
486 vc.hashcode = vn_hash_constant_with_type (constant);
487 vc.constant = constant;
488 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
489 if (slot)
490 return (*slot)->value_id;
491 return 0;
494 /* Lookup a value id for CONSTANT, and if it does not exist, create a
495 new one and return it. If it does exist, return it. */
497 unsigned int
498 get_or_alloc_constant_value_id (tree constant)
500 vn_constant_s **slot;
501 struct vn_constant_s vc;
502 vn_constant_t vcp;
504 vc.hashcode = vn_hash_constant_with_type (constant);
505 vc.constant = constant;
506 slot = constant_to_value_id->find_slot (&vc, INSERT);
507 if (*slot)
508 return (*slot)->value_id;
510 vcp = XNEW (struct vn_constant_s);
511 vcp->hashcode = vc.hashcode;
512 vcp->constant = constant;
513 vcp->value_id = get_next_value_id ();
514 *slot = vcp;
515 bitmap_set_bit (constant_value_ids, vcp->value_id);
516 return vcp->value_id;
519 /* Return true if V is a value id for a constant. */
521 bool
522 value_id_constant_p (unsigned int v)
524 return bitmap_bit_p (constant_value_ids, v);
527 /* Compute the hash for a reference operand VRO1. */
529 static void
530 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
532 hstate.add_int (vro1->opcode);
533 if (vro1->op0)
534 inchash::add_expr (vro1->op0, hstate);
535 if (vro1->op1)
536 inchash::add_expr (vro1->op1, hstate);
537 if (vro1->op2)
538 inchash::add_expr (vro1->op2, hstate);
541 /* Compute a hash for the reference operation VR1 and return it. */
543 static hashval_t
544 vn_reference_compute_hash (const vn_reference_t vr1)
546 inchash::hash hstate;
547 hashval_t result;
548 int i;
549 vn_reference_op_t vro;
550 HOST_WIDE_INT off = -1;
551 bool deref = false;
553 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
555 if (vro->opcode == MEM_REF)
556 deref = true;
557 else if (vro->opcode != ADDR_EXPR)
558 deref = false;
559 if (vro->off != -1)
561 if (off == -1)
562 off = 0;
563 off += vro->off;
565 else
567 if (off != -1
568 && off != 0)
569 hstate.add_int (off);
570 off = -1;
571 if (deref
572 && vro->opcode == ADDR_EXPR)
574 if (vro->op0)
576 tree op = TREE_OPERAND (vro->op0, 0);
577 hstate.add_int (TREE_CODE (op));
578 inchash::add_expr (op, hstate);
581 else
582 vn_reference_op_compute_hash (vro, hstate);
585 result = hstate.end ();
586 /* ??? We would ICE later if we hash instead of adding that in. */
587 if (vr1->vuse)
588 result += SSA_NAME_VERSION (vr1->vuse);
590 return result;
593 /* Return true if reference operations VR1 and VR2 are equivalent. This
594 means they have the same set of operands and vuses. */
596 bool
597 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
599 unsigned i, j;
601 /* Early out if this is not a hash collision. */
602 if (vr1->hashcode != vr2->hashcode)
603 return false;
605 /* The VOP needs to be the same. */
606 if (vr1->vuse != vr2->vuse)
607 return false;
609 /* If the operands are the same we are done. */
610 if (vr1->operands == vr2->operands)
611 return true;
613 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
614 return false;
616 if (INTEGRAL_TYPE_P (vr1->type)
617 && INTEGRAL_TYPE_P (vr2->type))
619 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
620 return false;
622 else if (INTEGRAL_TYPE_P (vr1->type)
623 && (TYPE_PRECISION (vr1->type)
624 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
625 return false;
626 else if (INTEGRAL_TYPE_P (vr2->type)
627 && (TYPE_PRECISION (vr2->type)
628 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
629 return false;
631 i = 0;
632 j = 0;
635 HOST_WIDE_INT off1 = 0, off2 = 0;
636 vn_reference_op_t vro1, vro2;
637 vn_reference_op_s tem1, tem2;
638 bool deref1 = false, deref2 = false;
639 for (; vr1->operands.iterate (i, &vro1); i++)
641 if (vro1->opcode == MEM_REF)
642 deref1 = true;
643 /* Do not look through a storage order barrier. */
644 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
645 return false;
646 if (vro1->off == -1)
647 break;
648 off1 += vro1->off;
650 for (; vr2->operands.iterate (j, &vro2); j++)
652 if (vro2->opcode == MEM_REF)
653 deref2 = true;
654 /* Do not look through a storage order barrier. */
655 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
656 return false;
657 if (vro2->off == -1)
658 break;
659 off2 += vro2->off;
661 if (off1 != off2)
662 return false;
663 if (deref1 && vro1->opcode == ADDR_EXPR)
665 memset (&tem1, 0, sizeof (tem1));
666 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
667 tem1.type = TREE_TYPE (tem1.op0);
668 tem1.opcode = TREE_CODE (tem1.op0);
669 vro1 = &tem1;
670 deref1 = false;
672 if (deref2 && vro2->opcode == ADDR_EXPR)
674 memset (&tem2, 0, sizeof (tem2));
675 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
676 tem2.type = TREE_TYPE (tem2.op0);
677 tem2.opcode = TREE_CODE (tem2.op0);
678 vro2 = &tem2;
679 deref2 = false;
681 if (deref1 != deref2)
682 return false;
683 if (!vn_reference_op_eq (vro1, vro2))
684 return false;
685 ++j;
686 ++i;
688 while (vr1->operands.length () != i
689 || vr2->operands.length () != j);
691 return true;
694 /* Copy the operations present in load/store REF into RESULT, a vector of
695 vn_reference_op_s's. */
697 static void
698 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
700 if (TREE_CODE (ref) == TARGET_MEM_REF)
702 vn_reference_op_s temp;
704 result->reserve (3);
706 memset (&temp, 0, sizeof (temp));
707 temp.type = TREE_TYPE (ref);
708 temp.opcode = TREE_CODE (ref);
709 temp.op0 = TMR_INDEX (ref);
710 temp.op1 = TMR_STEP (ref);
711 temp.op2 = TMR_OFFSET (ref);
712 temp.off = -1;
713 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
714 temp.base = MR_DEPENDENCE_BASE (ref);
715 result->quick_push (temp);
717 memset (&temp, 0, sizeof (temp));
718 temp.type = NULL_TREE;
719 temp.opcode = ERROR_MARK;
720 temp.op0 = TMR_INDEX2 (ref);
721 temp.off = -1;
722 result->quick_push (temp);
724 memset (&temp, 0, sizeof (temp));
725 temp.type = NULL_TREE;
726 temp.opcode = TREE_CODE (TMR_BASE (ref));
727 temp.op0 = TMR_BASE (ref);
728 temp.off = -1;
729 result->quick_push (temp);
730 return;
733 /* For non-calls, store the information that makes up the address. */
734 tree orig = ref;
735 while (ref)
737 vn_reference_op_s temp;
739 memset (&temp, 0, sizeof (temp));
740 temp.type = TREE_TYPE (ref);
741 temp.opcode = TREE_CODE (ref);
742 temp.off = -1;
744 switch (temp.opcode)
746 case MODIFY_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 break;
749 case WITH_SIZE_EXPR:
750 temp.op0 = TREE_OPERAND (ref, 1);
751 temp.off = 0;
752 break;
753 case MEM_REF:
754 /* The base address gets its own vn_reference_op_s structure. */
755 temp.op0 = TREE_OPERAND (ref, 1);
757 offset_int off = mem_ref_offset (ref);
758 if (wi::fits_shwi_p (off))
759 temp.off = off.to_shwi ();
761 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
762 temp.base = MR_DEPENDENCE_BASE (ref);
763 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
764 break;
765 case BIT_FIELD_REF:
766 /* Record bits, position and storage order. */
767 temp.op0 = TREE_OPERAND (ref, 1);
768 temp.op1 = TREE_OPERAND (ref, 2);
769 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
771 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
772 if (off % BITS_PER_UNIT == 0)
773 temp.off = off / BITS_PER_UNIT;
775 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
776 break;
777 case COMPONENT_REF:
778 /* The field decl is enough to unambiguously specify the field,
779 a matching type is not necessary and a mismatching type
780 is always a spurious difference. */
781 temp.type = NULL_TREE;
782 temp.op0 = TREE_OPERAND (ref, 1);
783 temp.op1 = TREE_OPERAND (ref, 2);
785 tree this_offset = component_ref_field_offset (ref);
786 if (this_offset
787 && TREE_CODE (this_offset) == INTEGER_CST)
789 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
790 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
792 offset_int off
793 = (wi::to_offset (this_offset)
794 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
795 if (wi::fits_shwi_p (off)
796 /* Prohibit value-numbering zero offset components
797 of addresses the same before the pass folding
798 __builtin_object_size had a chance to run
799 (checking cfun->after_inlining does the
800 trick here). */
801 && (TREE_CODE (orig) != ADDR_EXPR
802 || off != 0
803 || cfun->after_inlining))
804 temp.off = off.to_shwi ();
808 break;
809 case ARRAY_RANGE_REF:
810 case ARRAY_REF:
812 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
813 /* Record index as operand. */
814 temp.op0 = TREE_OPERAND (ref, 1);
815 /* Always record lower bounds and element size. */
816 temp.op1 = array_ref_low_bound (ref);
817 /* But record element size in units of the type alignment. */
818 temp.op2 = TREE_OPERAND (ref, 3);
819 temp.align = eltype->type_common.align;
820 if (! temp.op2)
821 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
822 size_int (TYPE_ALIGN_UNIT (eltype)));
823 if (TREE_CODE (temp.op0) == INTEGER_CST
824 && TREE_CODE (temp.op1) == INTEGER_CST
825 && TREE_CODE (temp.op2) == INTEGER_CST)
827 offset_int off = ((wi::to_offset (temp.op0)
828 - wi::to_offset (temp.op1))
829 * wi::to_offset (temp.op2)
830 * vn_ref_op_align_unit (&temp));
831 if (wi::fits_shwi_p (off))
832 temp.off = off.to_shwi();
835 break;
836 case VAR_DECL:
837 if (DECL_HARD_REGISTER (ref))
839 temp.op0 = ref;
840 break;
842 /* Fallthru. */
843 case PARM_DECL:
844 case CONST_DECL:
845 case RESULT_DECL:
846 /* Canonicalize decls to MEM[&decl] which is what we end up with
847 when valueizing MEM[ptr] with ptr = &decl. */
848 temp.opcode = MEM_REF;
849 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
850 temp.off = 0;
851 result->safe_push (temp);
852 temp.opcode = ADDR_EXPR;
853 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
854 temp.type = TREE_TYPE (temp.op0);
855 temp.off = -1;
856 break;
857 case STRING_CST:
858 case INTEGER_CST:
859 case COMPLEX_CST:
860 case VECTOR_CST:
861 case REAL_CST:
862 case FIXED_CST:
863 case CONSTRUCTOR:
864 case SSA_NAME:
865 temp.op0 = ref;
866 break;
867 case ADDR_EXPR:
868 if (is_gimple_min_invariant (ref))
870 temp.op0 = ref;
871 break;
873 break;
874 /* These are only interesting for their operands, their
875 existence, and their type. They will never be the last
876 ref in the chain of references (i.e. they require an
877 operand), so we don't have to put anything
878 for op* as it will be handled by the iteration */
879 case REALPART_EXPR:
880 temp.off = 0;
881 break;
882 case VIEW_CONVERT_EXPR:
883 temp.off = 0;
884 temp.reverse = storage_order_barrier_p (ref);
885 break;
886 case IMAGPART_EXPR:
887 /* This is only interesting for its constant offset. */
888 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
889 break;
890 default:
891 gcc_unreachable ();
893 result->safe_push (temp);
895 if (REFERENCE_CLASS_P (ref)
896 || TREE_CODE (ref) == MODIFY_EXPR
897 || TREE_CODE (ref) == WITH_SIZE_EXPR
898 || (TREE_CODE (ref) == ADDR_EXPR
899 && !is_gimple_min_invariant (ref)))
900 ref = TREE_OPERAND (ref, 0);
901 else
902 ref = NULL_TREE;
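/* Illustrative example (added for exposition): for a load from a.f where
   a is a VAR_DECL, the walk above canonicalizes the base to MEM[&a], so
   the resulting operand vector is roughly

       { COMPONENT_REF <f>, MEM_REF <offset 0>, ADDR_EXPR <&a> }

   which is the same decomposition obtained when valueizing MEM[p_1].f
   with p_1 = &a.  */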
906 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
907 operands in *OPS, the reference alias set SET and the reference type TYPE.
908 Return true if something useful was produced. */
910 bool
911 ao_ref_init_from_vn_reference (ao_ref *ref,
912 alias_set_type set, tree type,
913 vec<vn_reference_op_s> ops)
915 vn_reference_op_t op;
916 unsigned i;
917 tree base = NULL_TREE;
918 tree *op0_p = &base;
919 offset_int offset = 0;
920 offset_int max_size;
921 offset_int size = -1;
922 tree size_tree = NULL_TREE;
923 alias_set_type base_alias_set = -1;
925 /* First get the final access size from just the outermost expression. */
926 op = &ops[0];
927 if (op->opcode == COMPONENT_REF)
928 size_tree = DECL_SIZE (op->op0);
929 else if (op->opcode == BIT_FIELD_REF)
930 size_tree = op->op0;
931 else
933 machine_mode mode = TYPE_MODE (type);
934 if (mode == BLKmode)
935 size_tree = TYPE_SIZE (type);
936 else
937 size = int (GET_MODE_BITSIZE (mode));
939 if (size_tree != NULL_TREE
940 && TREE_CODE (size_tree) == INTEGER_CST)
941 size = wi::to_offset (size_tree);
943 /* Initially, maxsize is the same as the accessed element size.
944 In the following it will only grow (or become -1). */
945 max_size = size;
947 /* Compute cumulative bit-offset for nested component-refs and array-refs,
948 and find the ultimate containing object. */
949 FOR_EACH_VEC_ELT (ops, i, op)
951 switch (op->opcode)
953 /* These may be in the reference ops, but we cannot do anything
954 sensible with them here. */
955 case ADDR_EXPR:
956 /* Apart from ADDR_EXPR arguments to MEM_REF. */
957 if (base != NULL_TREE
958 && TREE_CODE (base) == MEM_REF
959 && op->op0
960 && DECL_P (TREE_OPERAND (op->op0, 0)))
962 vn_reference_op_t pop = &ops[i-1];
963 base = TREE_OPERAND (op->op0, 0);
964 if (pop->off == -1)
966 max_size = -1;
967 offset = 0;
969 else
970 offset += pop->off * BITS_PER_UNIT;
971 op0_p = NULL;
972 break;
974 /* Fallthru. */
975 case CALL_EXPR:
976 return false;
978 /* Record the base objects. */
979 case MEM_REF:
980 base_alias_set = get_deref_alias_set (op->op0);
981 *op0_p = build2 (MEM_REF, op->type,
982 NULL_TREE, op->op0);
983 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
984 MR_DEPENDENCE_BASE (*op0_p) = op->base;
985 op0_p = &TREE_OPERAND (*op0_p, 0);
986 break;
988 case VAR_DECL:
989 case PARM_DECL:
990 case RESULT_DECL:
991 case SSA_NAME:
992 *op0_p = op->op0;
993 op0_p = NULL;
994 break;
996 /* And now the usual component-reference style ops. */
997 case BIT_FIELD_REF:
998 offset += wi::to_offset (op->op1);
999 break;
1001 case COMPONENT_REF:
1003 tree field = op->op0;
1004 /* We do not have a complete COMPONENT_REF tree here so we
1005 cannot use component_ref_field_offset. Do the interesting
1006 parts manually. */
1007 tree this_offset = DECL_FIELD_OFFSET (field);
1009 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1010 max_size = -1;
1011 else
1013 offset_int woffset = (wi::to_offset (this_offset)
1014 << LOG2_BITS_PER_UNIT);
1015 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1016 offset += woffset;
1018 break;
1021 case ARRAY_RANGE_REF:
1022 case ARRAY_REF:
1023 /* We recorded the lower bound and the element size. */
1024 if (TREE_CODE (op->op0) != INTEGER_CST
1025 || TREE_CODE (op->op1) != INTEGER_CST
1026 || TREE_CODE (op->op2) != INTEGER_CST)
1027 max_size = -1;
1028 else
1030 offset_int woffset
1031 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1032 TYPE_PRECISION (TREE_TYPE (op->op0)));
1033 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1034 woffset <<= LOG2_BITS_PER_UNIT;
1035 offset += woffset;
1037 break;
1039 case REALPART_EXPR:
1040 break;
1042 case IMAGPART_EXPR:
1043 offset += size;
1044 break;
1046 case VIEW_CONVERT_EXPR:
1047 break;
1049 case STRING_CST:
1050 case INTEGER_CST:
1051 case COMPLEX_CST:
1052 case VECTOR_CST:
1053 case REAL_CST:
1054 case CONSTRUCTOR:
1055 case CONST_DECL:
1056 return false;
1058 default:
1059 return false;
1063 if (base == NULL_TREE)
1064 return false;
1066 ref->ref = NULL_TREE;
1067 ref->base = base;
1068 ref->ref_alias_set = set;
1069 if (base_alias_set != -1)
1070 ref->base_alias_set = base_alias_set;
1071 else
1072 ref->base_alias_set = get_alias_set (base);
1073 /* We discount volatiles from value-numbering elsewhere. */
1074 ref->volatile_p = false;
1076 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1078 ref->offset = 0;
1079 ref->size = -1;
1080 ref->max_size = -1;
1081 return true;
1084 ref->size = size.to_shwi ();
1086 if (!wi::fits_shwi_p (offset))
1088 ref->offset = 0;
1089 ref->max_size = -1;
1090 return true;
1093 ref->offset = offset.to_shwi ();
1095 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1096 ref->max_size = -1;
1097 else
1098 ref->max_size = max_size.to_shwi ();
1100 return true;
1103 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1104 vn_reference_op_s's. */
1106 static void
1107 copy_reference_ops_from_call (gcall *call,
1108 vec<vn_reference_op_s> *result)
1110 vn_reference_op_s temp;
1111 unsigned i;
1112 tree lhs = gimple_call_lhs (call);
1113 int lr;
1115 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1116 different. By adding the lhs here in the vector, we ensure that the
1117 hashcode is different, guaranteeing a different value number. */
1118 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1120 memset (&temp, 0, sizeof (temp));
1121 temp.opcode = MODIFY_EXPR;
1122 temp.type = TREE_TYPE (lhs);
1123 temp.op0 = lhs;
1124 temp.off = -1;
1125 result->safe_push (temp);
1128 /* Copy the type, opcode, function, static chain and EH region, if any. */
1129 memset (&temp, 0, sizeof (temp));
1130 temp.type = gimple_call_return_type (call);
1131 temp.opcode = CALL_EXPR;
1132 temp.op0 = gimple_call_fn (call);
1133 temp.op1 = gimple_call_chain (call);
1134 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1135 temp.op2 = size_int (lr);
1136 temp.off = -1;
1137 if (gimple_call_with_bounds_p (call))
1138 temp.with_bounds = 1;
1139 result->safe_push (temp);
1141 /* Copy the call arguments. As they can be references as well,
1142 just chain them together. */
1143 for (i = 0; i < gimple_call_num_args (call); ++i)
1145 tree callarg = gimple_call_arg (call, i);
1146 copy_reference_ops_from_ref (callarg, result);
1150 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1151 *I_P to point to the last element of the replacement. */
1152 static bool
1153 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1154 unsigned int *i_p)
1156 unsigned int i = *i_p;
1157 vn_reference_op_t op = &(*ops)[i];
1158 vn_reference_op_t mem_op = &(*ops)[i - 1];
1159 tree addr_base;
1160 HOST_WIDE_INT addr_offset = 0;
1162 /* For &OBJ.foo.bar, all we have to do is add the offset of .foo.bar
1163 to the preceding MEM_REF offset and replace the address
1164 with &OBJ. */
1165 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1166 &addr_offset);
1167 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1168 if (addr_base != TREE_OPERAND (op->op0, 0))
1170 offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1171 off += addr_offset;
1172 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1173 op->op0 = build_fold_addr_expr (addr_base);
1174 if (tree_fits_shwi_p (mem_op->op0))
1175 mem_op->off = tree_to_shwi (mem_op->op0);
1176 else
1177 mem_op->off = -1;
1178 return true;
1180 return false;
1183 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1184 *I_P to point to the last element of the replacement. */
1185 static bool
1186 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1187 unsigned int *i_p)
1189 unsigned int i = *i_p;
1190 vn_reference_op_t op = &(*ops)[i];
1191 vn_reference_op_t mem_op = &(*ops)[i - 1];
1192 gimple *def_stmt;
1193 enum tree_code code;
1194 offset_int off;
1196 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1197 if (!is_gimple_assign (def_stmt))
1198 return false;
1200 code = gimple_assign_rhs_code (def_stmt);
1201 if (code != ADDR_EXPR
1202 && code != POINTER_PLUS_EXPR)
1203 return false;
1205 off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1207 /* For &OBJ.foo.bar, all we have to do is add the offset of .foo.bar
1208 to the preceding MEM_REF offset and replace the address
1209 with &OBJ. */
1210 if (code == ADDR_EXPR)
1212 tree addr, addr_base;
1213 HOST_WIDE_INT addr_offset;
1215 addr = gimple_assign_rhs1 (def_stmt);
1216 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1217 &addr_offset);
1218 /* If that didn't work because the address isn't invariant propagate
1219 the reference tree from the address operation in case the current
1220 dereference isn't offsetted. */
1221 if (!addr_base
1222 && *i_p == ops->length () - 1
1223 && off == 0
1224 /* This makes us disable this transform for PRE where the
1225 reference ops might be also used for code insertion which
1226 is invalid. */
1227 && default_vn_walk_kind == VN_WALKREWRITE)
1229 auto_vec<vn_reference_op_s, 32> tem;
1230 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1231 /* Make sure to preserve TBAA info. The only objects not
1232 wrapped in MEM_REFs that can have their address taken are
1233 STRING_CSTs. */
1234 if (tem.length () >= 2
1235 && tem[tem.length () - 2].opcode == MEM_REF)
1237 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1238 new_mem_op->op0
1239 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1240 wi::to_wide (new_mem_op->op0));
1242 else
1243 gcc_assert (tem.last ().opcode == STRING_CST);
1244 ops->pop ();
1245 ops->pop ();
1246 ops->safe_splice (tem);
1247 --*i_p;
1248 return true;
1250 if (!addr_base
1251 || TREE_CODE (addr_base) != MEM_REF)
1252 return false;
1254 off += addr_offset;
1255 off += mem_ref_offset (addr_base);
1256 op->op0 = TREE_OPERAND (addr_base, 0);
1258 else
1260 tree ptr, ptroff;
1261 ptr = gimple_assign_rhs1 (def_stmt);
1262 ptroff = gimple_assign_rhs2 (def_stmt);
1263 if (TREE_CODE (ptr) != SSA_NAME
1264 || TREE_CODE (ptroff) != INTEGER_CST)
1265 return false;
1267 off += wi::to_offset (ptroff);
1268 op->op0 = ptr;
1271 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1272 if (tree_fits_shwi_p (mem_op->op0))
1273 mem_op->off = tree_to_shwi (mem_op->op0);
1274 else
1275 mem_op->off = -1;
1276 if (TREE_CODE (op->op0) == SSA_NAME)
1277 op->op0 = SSA_VAL (op->op0);
1278 if (TREE_CODE (op->op0) != SSA_NAME)
1279 op->opcode = TREE_CODE (op->op0);
1281 /* And recurse. */
1282 if (TREE_CODE (op->op0) == SSA_NAME)
1283 vn_reference_maybe_forwprop_address (ops, i_p);
1284 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1285 vn_reference_fold_indirect (ops, i_p);
1286 return true;
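/* Illustrative example (added for exposition): given

       p_1 = q_2 + 4;           <- POINTER_PLUS_EXPR
       ... = MEM[p_1];

   the routine above folds the constant 4 into the preceding MEM_REF
   offset and replaces the address operand by q_2, i.e. the operands
   become those of MEM[q_2 + 4]; it then recurses in case q_2 is itself
   defined by an address computation.  */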
1289 /* Optimize the reference REF to a constant if possible or return
1290 NULL_TREE if not. */
1292 tree
1293 fully_constant_vn_reference_p (vn_reference_t ref)
1295 vec<vn_reference_op_s> operands = ref->operands;
1296 vn_reference_op_t op;
1298 /* Try to simplify the translated expression if it is
1299 a call to a builtin function with at most two arguments. */
1300 op = &operands[0];
1301 if (op->opcode == CALL_EXPR
1302 && TREE_CODE (op->op0) == ADDR_EXPR
1303 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1304 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1305 && operands.length () >= 2
1306 && operands.length () <= 3)
1308 vn_reference_op_t arg0, arg1 = NULL;
1309 bool anyconst = false;
1310 arg0 = &operands[1];
1311 if (operands.length () > 2)
1312 arg1 = &operands[2];
1313 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1314 || (arg0->opcode == ADDR_EXPR
1315 && is_gimple_min_invariant (arg0->op0)))
1316 anyconst = true;
1317 if (arg1
1318 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1319 || (arg1->opcode == ADDR_EXPR
1320 && is_gimple_min_invariant (arg1->op0))))
1321 anyconst = true;
1322 if (anyconst)
1324 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1325 arg1 ? 2 : 1,
1326 arg0->op0,
1327 arg1 ? arg1->op0 : NULL);
1328 if (folded
1329 && TREE_CODE (folded) == NOP_EXPR)
1330 folded = TREE_OPERAND (folded, 0);
1331 if (folded
1332 && is_gimple_min_invariant (folded))
1333 return folded;
1337 /* Simplify reads from constants or constant initializers. */
1338 else if (BITS_PER_UNIT == 8
1339 && is_gimple_reg_type (ref->type)
1340 && (!INTEGRAL_TYPE_P (ref->type)
1341 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1343 HOST_WIDE_INT off = 0;
1344 HOST_WIDE_INT size;
1345 if (INTEGRAL_TYPE_P (ref->type))
1346 size = TYPE_PRECISION (ref->type);
1347 else
1348 size = tree_to_shwi (TYPE_SIZE (ref->type));
1349 if (size % BITS_PER_UNIT != 0
1350 || size > MAX_BITSIZE_MODE_ANY_MODE)
1351 return NULL_TREE;
1352 size /= BITS_PER_UNIT;
1353 unsigned i;
1354 for (i = 0; i < operands.length (); ++i)
1356 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1358 ++i;
1359 break;
1361 if (operands[i].off == -1)
1362 return NULL_TREE;
1363 off += operands[i].off;
1364 if (operands[i].opcode == MEM_REF)
1366 ++i;
1367 break;
1370 vn_reference_op_t base = &operands[--i];
1371 tree ctor = error_mark_node;
1372 tree decl = NULL_TREE;
1373 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1374 ctor = base->op0;
1375 else if (base->opcode == MEM_REF
1376 && base[1].opcode == ADDR_EXPR
1377 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1378 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1380 decl = TREE_OPERAND (base[1].op0, 0);
1381 ctor = ctor_for_folding (decl);
1383 if (ctor == NULL_TREE)
1384 return build_zero_cst (ref->type);
1385 else if (ctor != error_mark_node)
1387 if (decl)
1389 tree res = fold_ctor_reference (ref->type, ctor,
1390 off * BITS_PER_UNIT,
1391 size * BITS_PER_UNIT, decl);
1392 if (res)
1394 STRIP_USELESS_TYPE_CONVERSION (res);
1395 if (is_gimple_min_invariant (res))
1396 return res;
1399 else
1401 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1402 int len = native_encode_expr (ctor, buf, size, off);
1403 if (len > 0)
1404 return native_interpret_expr (ref->type, buf, len);
1409 return NULL_TREE;
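/* Illustrative example (added for exposition): for

       static const int tab[4] = { 0, 10, 20, 30 };
       ... = tab[2];

   the operands carry constant offsets down to the MEM_REF base, the
   initializer of tab is obtained via ctor_for_folding, and
   fold_ctor_reference extracts the constant 20.  Reads from a base whose
   ctor_for_folding is NULL_TREE simply fold to zero of the access
   type.  */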
1412 /* Return true if OPS contain a storage order barrier. */
1414 static bool
1415 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1417 vn_reference_op_t op;
1418 unsigned i;
1420 FOR_EACH_VEC_ELT (ops, i, op)
1421 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1422 return true;
1424 return false;
1427 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1428 structures into their value numbers. This is done in-place, and
1429 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1430 whether any operands were valueized. */
1432 static vec<vn_reference_op_s>
1433 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1435 vn_reference_op_t vro;
1436 unsigned int i;
1438 *valueized_anything = false;
1440 FOR_EACH_VEC_ELT (orig, i, vro)
1442 if (vro->opcode == SSA_NAME
1443 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1445 tree tem = SSA_VAL (vro->op0);
1446 if (tem != vro->op0)
1448 *valueized_anything = true;
1449 vro->op0 = tem;
1451 /* If it transforms from an SSA_NAME to a constant, update
1452 the opcode. */
1453 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1454 vro->opcode = TREE_CODE (vro->op0);
1456 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1458 tree tem = SSA_VAL (vro->op1);
1459 if (tem != vro->op1)
1461 *valueized_anything = true;
1462 vro->op1 = tem;
1465 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1467 tree tem = SSA_VAL (vro->op2);
1468 if (tem != vro->op2)
1470 *valueized_anything = true;
1471 vro->op2 = tem;
1474 /* If it transforms from an SSA_NAME to an address, fold with
1475 a preceding indirect reference. */
1476 if (i > 0
1477 && vro->op0
1478 && TREE_CODE (vro->op0) == ADDR_EXPR
1479 && orig[i - 1].opcode == MEM_REF)
1481 if (vn_reference_fold_indirect (&orig, &i))
1482 *valueized_anything = true;
1484 else if (i > 0
1485 && vro->opcode == SSA_NAME
1486 && orig[i - 1].opcode == MEM_REF)
1488 if (vn_reference_maybe_forwprop_address (&orig, &i))
1489 *valueized_anything = true;
1491 /* If it transforms a non-constant ARRAY_REF into a constant
1492 one, adjust the constant offset. */
1493 else if (vro->opcode == ARRAY_REF
1494 && vro->off == -1
1495 && TREE_CODE (vro->op0) == INTEGER_CST
1496 && TREE_CODE (vro->op1) == INTEGER_CST
1497 && TREE_CODE (vro->op2) == INTEGER_CST)
1499 offset_int off = ((wi::to_offset (vro->op0)
1500 - wi::to_offset (vro->op1))
1501 * wi::to_offset (vro->op2)
1502 * vn_ref_op_align_unit (vro));
1503 if (wi::fits_shwi_p (off))
1504 vro->off = off.to_shwi ();
1508 return orig;
1511 static vec<vn_reference_op_s>
1512 valueize_refs (vec<vn_reference_op_s> orig)
1514 bool tem;
1515 return valueize_refs_1 (orig, &tem);
1518 static vec<vn_reference_op_s> shared_lookup_references;
1520 /* Create a vector of vn_reference_op_s structures from REF, a
1521 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1522 this function. *VALUEIZED_ANYTHING will specify whether any
1523 operands were valueized. */
1525 static vec<vn_reference_op_s>
1526 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1528 if (!ref)
1529 return vNULL;
1530 shared_lookup_references.truncate (0);
1531 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1532 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1533 valueized_anything);
1534 return shared_lookup_references;
1537 /* Create a vector of vn_reference_op_s structures from CALL, a
1538 call statement. The vector is shared among all callers of
1539 this function. */
1541 static vec<vn_reference_op_s>
1542 valueize_shared_reference_ops_from_call (gcall *call)
1544 if (!call)
1545 return vNULL;
1546 shared_lookup_references.truncate (0);
1547 copy_reference_ops_from_call (call, &shared_lookup_references);
1548 shared_lookup_references = valueize_refs (shared_lookup_references);
1549 return shared_lookup_references;
1552 /* Lookup a SCCVN reference operation VR in the current hash table.
1553 Returns the resulting value number if it exists in the hash table,
1554 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1555 vn_reference_t stored in the hashtable if something is found. */
1557 static tree
1558 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1560 vn_reference_s **slot;
1561 hashval_t hash;
1563 hash = vr->hashcode;
1564 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1565 if (!slot && current_info == optimistic_info)
1566 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1567 if (slot)
1569 if (vnresult)
1570 *vnresult = (vn_reference_t)*slot;
1571 return ((vn_reference_t)*slot)->result;
1574 return NULL_TREE;
1577 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1578 with the current VUSE and performs the expression lookup. */
1580 static void *
1581 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1582 unsigned int cnt, void *vr_)
1584 vn_reference_t vr = (vn_reference_t)vr_;
1585 vn_reference_s **slot;
1586 hashval_t hash;
1588 /* This bounds the stmt walks we perform on reference lookups
1589 to O(1) instead of O(N) where N is the number of dominating
1590 stores. */
1591 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1592 return (void *)-1;
1594 if (last_vuse_ptr)
1595 *last_vuse_ptr = vuse;
1597 /* Fixup vuse and hash. */
1598 if (vr->vuse)
1599 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1600 vr->vuse = vuse_ssa_val (vuse);
1601 if (vr->vuse)
1602 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1604 hash = vr->hashcode;
1605 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1606 if (!slot && current_info == optimistic_info)
1607 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1608 if (slot)
1609 return *slot;
1611 return NULL;
1614 /* Lookup an existing or insert a new vn_reference entry into the
1615 value table for the VUSE, SET, TYPE, OPERANDS reference which
1616 has the value VALUE which is either a constant or an SSA name. */
1618 static vn_reference_t
1619 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1620 alias_set_type set,
1621 tree type,
1622 vec<vn_reference_op_s,
1623 va_heap> operands,
1624 tree value)
1626 vn_reference_s vr1;
1627 vn_reference_t result;
1628 unsigned value_id;
1629 vr1.vuse = vuse;
1630 vr1.operands = operands;
1631 vr1.type = type;
1632 vr1.set = set;
1633 vr1.hashcode = vn_reference_compute_hash (&vr1);
1634 if (vn_reference_lookup_1 (&vr1, &result))
1635 return result;
1636 if (TREE_CODE (value) == SSA_NAME)
1637 value_id = VN_INFO (value)->value_id;
1638 else
1639 value_id = get_or_alloc_constant_value_id (value);
1640 return vn_reference_insert_pieces (vuse, set, type,
1641 operands.copy (), value, value_id);
1644 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1645 static unsigned mprts_hook_cnt;
1647 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1649 static tree
1650 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1652 if (!rcode.is_tree_code ())
1653 return NULL_TREE;
1654 tree *ops = ops_;
1655 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1656 if (rcode == CONSTRUCTOR
1657 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1658 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1659 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1661 length = CONSTRUCTOR_NELTS (ops_[0]);
1662 ops = XALLOCAVEC (tree, length);
1663 for (unsigned i = 0; i < length; ++i)
1664 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1666 vn_nary_op_t vnresult = NULL;
1667 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1668 type, ops, &vnresult);
1669 /* We can end up endlessly recursing simplifications if the lookup above
1670 presents us with a def-use chain that mirrors the original simplification.
1671 See PR80887 for an example. Limit successful lookup artificially
1672 to 10 times if we are called as mprts_hook. */
1673 if (res
1674 && mprts_hook
1675 && --mprts_hook_cnt == 0)
1677 if (dump_file && (dump_flags & TDF_DETAILS))
1678 fprintf (dump_file, "Resetting mprts_hook after too many "
1679 "invocations.\n");
1680 mprts_hook = NULL;
1682 return res;
1685 /* Return a value-number for RCODE OPS... either by looking up an existing
1686 value-number for the simplified result or by inserting the operation if
1687 INSERT is true. */
1689 static tree
1690 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1691 bool insert)
1693 tree result = NULL_TREE;
1694 /* We will be creating a value number for
1695 RCODE (OPS...).
1696 So first simplify and lookup this expression to see if it
1697 is already available. */
1698 mprts_hook = vn_lookup_simplify_result;
1699 mprts_hook_cnt = 9;
1700 bool res = false;
1701 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1703 case 1:
1704 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1705 break;
1706 case 2:
1707 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1708 break;
1709 case 3:
1710 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1711 break;
1713 mprts_hook = NULL;
1714 gimple *new_stmt = NULL;
1715 if (res
1716 && gimple_simplified_result_is_gimple_val (rcode, ops))
1717 /* The expression is already available. */
1718 result = ops[0];
1719 else
1721 tree val = vn_lookup_simplify_result (rcode, type, ops);
1722 if (!val && insert)
1724 gimple_seq stmts = NULL;
1725 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1726 if (result)
1728 gcc_assert (gimple_seq_singleton_p (stmts));
1729 new_stmt = gimple_seq_first_stmt (stmts);
1732 else
1733 /* The expression is already available. */
1734 result = val;
1736 if (new_stmt)
1738 /* The expression is not yet available, value-number lhs to
1739 the new SSA_NAME we created. */
1740 /* Initialize value-number information properly. */
1741 VN_INFO_GET (result)->valnum = result;
1742 VN_INFO (result)->value_id = get_next_value_id ();
1743 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1744 new_stmt);
1745 VN_INFO (result)->needs_insertion = true;
1746 /* ??? PRE phi-translation inserts NARYs without corresponding
1747 SSA name result. Re-use those but set their result according
1748 to the stmt we just built. */
1749 vn_nary_op_t nary = NULL;
1750 vn_nary_op_lookup_stmt (new_stmt, &nary);
1751 if (nary)
1753 gcc_assert (nary->result == NULL_TREE);
1754 nary->result = gimple_assign_lhs (new_stmt);
1756 /* As all "inserted" statements are singleton SCCs, insert
1757 to the valid table. This is strictly needed to
1758 avoid re-generating new value SSA_NAMEs for the same
1759 expression during SCC iteration over and over (the
1760 optimistic table gets cleared after each iteration).
1761 We do not need to insert into the optimistic table, as
1762 lookups there will fall back to the valid table. */
1763 else if (current_info == optimistic_info)
1765 current_info = valid_info;
1766 vn_nary_op_insert_stmt (new_stmt, result);
1767 current_info = optimistic_info;
1769 else
1770 vn_nary_op_insert_stmt (new_stmt, result);
1771 if (dump_file && (dump_flags & TDF_DETAILS))
1773 fprintf (dump_file, "Inserting name ");
1774 print_generic_expr (dump_file, result);
1775 fprintf (dump_file, " for expression ");
1776 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1777 fprintf (dump_file, "\n");
1780 return result;
1783 /* Return a value-number for RCODE OPS... either by looking up an existing
1784 value-number for the simplified result or by inserting the operation. */
1786 static tree
1787 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1789 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1792 /* Try to simplify the n-ary expression NARY and return its value
1793 if present. */
1795 tree
1796 vn_nary_simplify (vn_nary_op_t nary)
1798 if (nary->length > 3)
1799 return NULL_TREE;
1800 tree ops[3];
1801 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1802 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1806 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1807 from the statement defining VUSE and if not successful tries to
1808 translate *REFP and VR_ through an aggregate copy at the definition
1809 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1810 of *REF and *VR. If only disambiguation was performed then
1811 *DISAMBIGUATE_ONLY is set to true. */
1813 static void *
1814 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1815 bool *disambiguate_only)
1817 vn_reference_t vr = (vn_reference_t)vr_;
1818 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1819 tree base = ao_ref_base (ref);
1820 HOST_WIDE_INT offset, maxsize;
1821 static vec<vn_reference_op_s> lhs_ops;
1822 ao_ref lhs_ref;
1823 bool lhs_ref_ok = false;
1825 /* If the reference is based on a parameter that was determined to be
1826 pointing to readonly memory, it doesn't change. */
1827 if (TREE_CODE (base) == MEM_REF
1828 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1829 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1830 && bitmap_bit_p (const_parms,
1831 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1833 *disambiguate_only = true;
1834 return NULL;
1837 /* First try to disambiguate after value-replacing in the definitions LHS. */
1838 if (is_gimple_assign (def_stmt))
1840 tree lhs = gimple_assign_lhs (def_stmt);
1841 bool valueized_anything = false;
1842 /* Avoid re-allocation overhead. */
1843 lhs_ops.truncate (0);
1844 copy_reference_ops_from_ref (lhs, &lhs_ops);
1845 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1846 if (valueized_anything)
1848 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1849 get_alias_set (lhs),
1850 TREE_TYPE (lhs), lhs_ops);
1851 if (lhs_ref_ok
1852 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1854 *disambiguate_only = true;
1855 return NULL;
1858 else
1860 ao_ref_init (&lhs_ref, lhs);
1861 lhs_ref_ok = true;
1864 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1865 && gimple_call_num_args (def_stmt) <= 4)
1867 /* For builtin calls, valueize their arguments and call the
1868 alias oracle again. Valueization may improve points-to
1869 info of pointers and constify size and position arguments.
1870 Originally this was motivated by PR61034 which has
1871 conditional calls to free falsely clobbering ref because
1872 of imprecise points-to info of the argument. */
1873 tree oldargs[4];
1874 bool valueized_anything = false;
1875 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1877 oldargs[i] = gimple_call_arg (def_stmt, i);
1878 tree val = vn_valueize (oldargs[i]);
1879 if (val != oldargs[i])
1881 gimple_call_set_arg (def_stmt, i, val);
1882 valueized_anything = true;
1885 if (valueized_anything)
1887 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1888 ref);
1889 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1890 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1891 if (!res)
1893 *disambiguate_only = true;
1894 return NULL;
1899 if (*disambiguate_only)
1900 return (void *)-1;
1902 offset = ref->offset;
1903 maxsize = ref->max_size;
1905 /* If we cannot constrain the size of the reference we cannot
1906 test if anything kills it. */
1907 if (maxsize == -1)
1908 return (void *)-1;
1910 /* We can't deduce anything useful from clobbers. */
1911 if (gimple_clobber_p (def_stmt))
1912 return (void *)-1;
1914 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1915 from that definition.
1916 1) Memset. */
1917 if (is_gimple_reg_type (vr->type)
1918 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1919 && integer_zerop (gimple_call_arg (def_stmt, 1))
1920 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1921 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1923 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1924 tree base2;
1925 HOST_WIDE_INT offset2, size2, maxsize2;
1926 bool reverse;
1927 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1928 &reverse);
1929 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1930 if ((unsigned HOST_WIDE_INT)size2 / 8
1931 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1932 && maxsize2 != -1
1933 && operand_equal_p (base, base2, 0)
1934 && offset2 <= offset
1935 && offset2 + size2 >= offset + maxsize)
1937 tree val = build_zero_cst (vr->type);
1938 return vn_reference_lookup_or_insert_for_pieces
1939 (vuse, vr->set, vr->type, vr->operands, val);
1943 /* 2) Assignment from an empty CONSTRUCTOR. */
1944 else if (is_gimple_reg_type (vr->type)
1945 && gimple_assign_single_p (def_stmt)
1946 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1947 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1949 tree base2;
1950 HOST_WIDE_INT offset2, size2, maxsize2;
1951 bool reverse;
1952 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1953 &offset2, &size2, &maxsize2, &reverse);
1954 if (maxsize2 != -1
1955 && operand_equal_p (base, base2, 0)
1956 && offset2 <= offset
1957 && offset2 + size2 >= offset + maxsize)
1959 tree val = build_zero_cst (vr->type);
1960 return vn_reference_lookup_or_insert_for_pieces
1961 (vuse, vr->set, vr->type, vr->operands, val);
1965 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1966 routines to extract the assigned bits. */
1967 else if (ref->size == maxsize
1968 && is_gimple_reg_type (vr->type)
1969 && !contains_storage_order_barrier_p (vr->operands)
1970 && gimple_assign_single_p (def_stmt)
1971 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1972 && maxsize % BITS_PER_UNIT == 0
1973 && offset % BITS_PER_UNIT == 0
1974 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1975 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1976 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1978 tree base2;
1979 HOST_WIDE_INT offset2, size2, maxsize2;
1980 bool reverse;
1981 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1982 &offset2, &size2, &maxsize2, &reverse);
1983 if (!reverse
1984 && maxsize2 != -1
1985 && maxsize2 == size2
1986 && size2 % BITS_PER_UNIT == 0
1987 && offset2 % BITS_PER_UNIT == 0
1988 && operand_equal_p (base, base2, 0)
1989 && offset2 <= offset
1990 && offset2 + size2 >= offset + maxsize)
1992 /* We support up to 512-bit values (for V8DFmode). */
1993 unsigned char buffer[64];
1994 int len;
1996 tree rhs = gimple_assign_rhs1 (def_stmt);
1997 if (TREE_CODE (rhs) == SSA_NAME)
1998 rhs = SSA_VAL (rhs);
1999 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2000 buffer, sizeof (buffer));
2001 if (len > 0)
2003 tree type = vr->type;
2004 /* Make sure to interpret in a type that has a range
2005 covering the whole access size. */
2006 if (INTEGRAL_TYPE_P (vr->type)
2007 && ref->size != TYPE_PRECISION (vr->type))
2008 type = build_nonstandard_integer_type (ref->size,
2009 TYPE_UNSIGNED (type));
2010 tree val = native_interpret_expr (type,
2011 buffer
2012 + ((offset - offset2)
2013 / BITS_PER_UNIT),
2014 ref->size / BITS_PER_UNIT);
2015 /* If we chop off bits because the type's precision doesn't
2016 match the memory access size, this is ok when optimizing
2017 reads but not when called from the DSE code during
2018 elimination. */
2019 if (val
2020 && type != vr->type)
2022 if (! int_fits_type_p (val, vr->type))
2023 val = NULL_TREE;
2024 else
2025 val = fold_convert (vr->type, val);
2028 if (val)
2029 return vn_reference_lookup_or_insert_for_pieces
2030 (vuse, vr->set, vr->type, vr->operands, val);
2035 /* 4) Assignment from an SSA name which definition we may be able
2036 to access pieces from. */
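/* E.g. after "*p = val_1;" a read of a piece of *p can be expressed
   as a BIT_FIELD_REF of the stored SSA name's value, using the
   accessed size and the offset relative to the store. */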
2037 else if (ref->size == maxsize
2038 && is_gimple_reg_type (vr->type)
2039 && !contains_storage_order_barrier_p (vr->operands)
2040 && gimple_assign_single_p (def_stmt)
2041 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2043 tree base2;
2044 HOST_WIDE_INT offset2, size2, maxsize2;
2045 bool reverse;
2046 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2047 &offset2, &size2, &maxsize2,
2048 &reverse);
2049 if (!reverse
2050 && maxsize2 != -1
2051 && maxsize2 == size2
2052 && operand_equal_p (base, base2, 0)
2053 && offset2 <= offset
2054 && offset2 + size2 >= offset + maxsize
2055 /* ??? We can't handle bitfield precision extracts without
2056 either using an alternate type for the BIT_FIELD_REF and
2057 then doing a conversion or possibly adjusting the offset
2058 according to endianness. */
2059 && (! INTEGRAL_TYPE_P (vr->type)
2060 || ref->size == TYPE_PRECISION (vr->type))
2061 && ref->size % BITS_PER_UNIT == 0)
2063 code_helper rcode = BIT_FIELD_REF;
2064 tree ops[3];
2065 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2066 ops[1] = bitsize_int (ref->size);
2067 ops[2] = bitsize_int (offset - offset2);
2068 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2069 if (val
2070 && (TREE_CODE (val) != SSA_NAME
2071 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2073 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2074 (vuse, vr->set, vr->type, vr->operands, val);
2075 return res;
2080 /* 5) For aggregate copies translate the reference through them if
2081 the copy kills ref. */
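/* E.g. for "b = a; ... = b.f;" continue the lookup with the
   reference rewritten to be based on the copy source, i.e. "a.f". */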
2082 else if (vn_walk_kind == VN_WALKREWRITE
2083 && gimple_assign_single_p (def_stmt)
2084 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2085 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2086 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2088 tree base2;
2089 HOST_WIDE_INT maxsize2;
2090 int i, j, k;
2091 auto_vec<vn_reference_op_s> rhs;
2092 vn_reference_op_t vro;
2093 ao_ref r;
2095 if (!lhs_ref_ok)
2096 return (void *)-1;
2098 /* See if the assignment kills REF. */
2099 base2 = ao_ref_base (&lhs_ref);
2100 maxsize2 = lhs_ref.max_size;
2101 if (maxsize2 == -1
2102 || (base != base2
2103 && (TREE_CODE (base) != MEM_REF
2104 || TREE_CODE (base2) != MEM_REF
2105 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2106 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2107 TREE_OPERAND (base2, 1))))
2108 || !stmt_kills_ref_p (def_stmt, ref))
2109 return (void *)-1;
2111 /* Find the common base of ref and the lhs. lhs_ops already
2112 contains valueized operands for the lhs. */
2113 i = vr->operands.length () - 1;
2114 j = lhs_ops.length () - 1;
2115 while (j >= 0 && i >= 0
2116 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2118 i--;
2119 j--;
2122 /* ??? The innermost op should always be a MEM_REF and we already
2123 checked that the assignment to the lhs kills vr. Thus for
2124 aggregate copies using char[] types the vn_reference_op_eq
2125 may fail when comparing types for compatibility. But we really
2126 don't care here - further lookups with the rewritten operands
2127 will simply fail if we messed up types too badly. */
2128 HOST_WIDE_INT extra_off = 0;
2129 if (j == 0 && i >= 0
2130 && lhs_ops[0].opcode == MEM_REF
2131 && lhs_ops[0].off != -1)
2133 if (lhs_ops[0].off == vr->operands[i].off)
2134 i--, j--;
2135 else if (vr->operands[i].opcode == MEM_REF
2136 && vr->operands[i].off != -1)
2138 extra_off = vr->operands[i].off - lhs_ops[0].off;
2139 i--, j--;
2143 /* i now points to the first additional op.
2144 ??? LHS may not be completely contained in VR, one or more
2145 VIEW_CONVERT_EXPRs could be in its way. We could at least
2146 try handling outermost VIEW_CONVERT_EXPRs. */
2147 if (j != -1)
2148 return (void *)-1;
2150 /* Punt if the additional ops contain a storage order barrier. */
2151 for (k = i; k >= 0; k--)
2153 vro = &vr->operands[k];
2154 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2155 return (void *)-1;
2158 /* Now re-write REF to be based on the rhs of the assignment. */
2159 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2161 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2162 if (extra_off != 0)
2164 if (rhs.length () < 2
2165 || rhs[0].opcode != MEM_REF
2166 || rhs[0].off == -1)
2167 return (void *)-1;
2168 rhs[0].off += extra_off;
2169 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2170 build_int_cst (TREE_TYPE (rhs[0].op0),
2171 extra_off));
2174 /* We need to pre-pend vr->operands[0..i] to rhs. */
2175 vec<vn_reference_op_s> old = vr->operands;
2176 if (i + 1 + rhs.length () > vr->operands.length ())
2177 vr->operands.safe_grow (i + 1 + rhs.length ());
2178 else
2179 vr->operands.truncate (i + 1 + rhs.length ());
2180 FOR_EACH_VEC_ELT (rhs, j, vro)
2181 vr->operands[i + 1 + j] = *vro;
2182 vr->operands = valueize_refs (vr->operands);
2183 if (old == shared_lookup_references)
2184 shared_lookup_references = vr->operands;
2185 vr->hashcode = vn_reference_compute_hash (vr);
2187 /* Try folding the new reference to a constant. */
2188 tree val = fully_constant_vn_reference_p (vr);
2189 if (val)
2190 return vn_reference_lookup_or_insert_for_pieces
2191 (vuse, vr->set, vr->type, vr->operands, val);
2193 /* Adjust *ref from the new operands. */
2194 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2195 return (void *)-1;
2196 /* This can happen with bitfields. */
2197 if (ref->size != r.size)
2198 return (void *)-1;
2199 *ref = r;
2201 /* Do not update last seen VUSE after translating. */
2202 last_vuse_ptr = NULL;
2204 /* Keep looking for the adjusted *REF / VR pair. */
2205 return NULL;
2208 /* 6) For memcpy copies translate the reference through them if
2209 the copy kills ref. */
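/* Like the aggregate copy case, but with the copy written as a
   builtin; e.g. "memcpy (&b, &a, n); ... = b.f;" continues the lookup
   through a MEM_REF based on the source at the translated offset. */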
2210 else if (vn_walk_kind == VN_WALKREWRITE
2211 && is_gimple_reg_type (vr->type)
2212 /* ??? Handle BCOPY as well. */
2213 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2214 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2215 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2216 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2217 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2218 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2219 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2220 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2222 tree lhs, rhs;
2223 ao_ref r;
2224 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2225 vn_reference_op_s op;
2226 HOST_WIDE_INT at;
2228 /* Only handle non-variable, addressable refs. */
2229 if (ref->size != maxsize
2230 || offset % BITS_PER_UNIT != 0
2231 || ref->size % BITS_PER_UNIT != 0)
2232 return (void *)-1;
2234 /* Extract a pointer base and an offset for the destination. */
2235 lhs = gimple_call_arg (def_stmt, 0);
2236 lhs_offset = 0;
2237 if (TREE_CODE (lhs) == SSA_NAME)
2239 lhs = SSA_VAL (lhs);
2240 if (TREE_CODE (lhs) == SSA_NAME)
2242 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2243 if (gimple_assign_single_p (def_stmt)
2244 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2245 lhs = gimple_assign_rhs1 (def_stmt);
2248 if (TREE_CODE (lhs) == ADDR_EXPR)
2250 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2251 &lhs_offset);
2252 if (!tem)
2253 return (void *)-1;
2254 if (TREE_CODE (tem) == MEM_REF
2255 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2257 lhs = TREE_OPERAND (tem, 0);
2258 if (TREE_CODE (lhs) == SSA_NAME)
2259 lhs = SSA_VAL (lhs);
2260 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2262 else if (DECL_P (tem))
2263 lhs = build_fold_addr_expr (tem);
2264 else
2265 return (void *)-1;
2267 if (TREE_CODE (lhs) != SSA_NAME
2268 && TREE_CODE (lhs) != ADDR_EXPR)
2269 return (void *)-1;
2271 /* Extract a pointer base and an offset for the source. */
2272 rhs = gimple_call_arg (def_stmt, 1);
2273 rhs_offset = 0;
2274 if (TREE_CODE (rhs) == SSA_NAME)
2275 rhs = SSA_VAL (rhs);
2276 if (TREE_CODE (rhs) == ADDR_EXPR)
2278 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2279 &rhs_offset);
2280 if (!tem)
2281 return (void *)-1;
2282 if (TREE_CODE (tem) == MEM_REF
2283 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2285 rhs = TREE_OPERAND (tem, 0);
2286 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2288 else if (DECL_P (tem))
2289 rhs = build_fold_addr_expr (tem);
2290 else
2291 return (void *)-1;
2293 if (TREE_CODE (rhs) != SSA_NAME
2294 && TREE_CODE (rhs) != ADDR_EXPR)
2295 return (void *)-1;
2297 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2299 /* The bases of the destination and the references have to agree. */
2300 if ((TREE_CODE (base) != MEM_REF
2301 && !DECL_P (base))
2302 || (TREE_CODE (base) == MEM_REF
2303 && (TREE_OPERAND (base, 0) != lhs
2304 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2305 || (DECL_P (base)
2306 && (TREE_CODE (lhs) != ADDR_EXPR
2307 || TREE_OPERAND (lhs, 0) != base)))
2308 return (void *)-1;
2310 at = offset / BITS_PER_UNIT;
2311 if (TREE_CODE (base) == MEM_REF)
2312 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2313 /* If the access is completely outside of the memcpy destination
2314 area there is no aliasing. */
2315 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2316 || lhs_offset + copy_size <= at)
2317 return NULL;
2318 /* And the access has to be contained within the memcpy destination. */
2319 if (lhs_offset > at
2320 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2321 return (void *)-1;
2323 /* Make room for 2 operands in the new reference. */
2324 if (vr->operands.length () < 2)
2326 vec<vn_reference_op_s> old = vr->operands;
2327 vr->operands.safe_grow_cleared (2);
2328 if (old == shared_lookup_references)
2329 shared_lookup_references = vr->operands;
2331 else
2332 vr->operands.truncate (2);
2334 /* The looked-through reference is a simple MEM_REF. */
2335 memset (&op, 0, sizeof (op));
2336 op.type = vr->type;
2337 op.opcode = MEM_REF;
2338 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2339 op.off = at - lhs_offset + rhs_offset;
2340 vr->operands[0] = op;
2341 op.type = TREE_TYPE (rhs);
2342 op.opcode = TREE_CODE (rhs);
2343 op.op0 = rhs;
2344 op.off = -1;
2345 vr->operands[1] = op;
2346 vr->hashcode = vn_reference_compute_hash (vr);
2348 /* Try folding the new reference to a constant. */
2349 tree val = fully_constant_vn_reference_p (vr);
2350 if (val)
2351 return vn_reference_lookup_or_insert_for_pieces
2352 (vuse, vr->set, vr->type, vr->operands, val);
2354 /* Adjust *ref from the new operands. */
2355 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2356 return (void *)-1;
2357 /* This can happen with bitfields. */
2358 if (ref->size != r.size)
2359 return (void *)-1;
2360 *ref = r;
2362 /* Do not update last seen VUSE after translating. */
2363 last_vuse_ptr = NULL;
2365 /* Keep looking for the adjusted *REF / VR pair. */
2366 return NULL;
2369 /* Bail out and stop walking. */
2370 return (void *)-1;
2373 /* Return a reference op vector from OP that can be used for
2374 vn_reference_lookup_pieces. The caller is responsible for releasing
2375 the vector. */
2377 vec<vn_reference_op_s>
2378 vn_reference_operands_for_lookup (tree op)
2380 bool valueized;
2381 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2384 /* Lookup a reference operation by its parts, in the current hash table.
2385 Returns the resulting value number if it exists in the hash table,
2386 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2387 vn_reference_t stored in the hashtable if something is found. */
2389 tree
2390 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2391 vec<vn_reference_op_s> operands,
2392 vn_reference_t *vnresult, vn_lookup_kind kind)
2394 struct vn_reference_s vr1;
2395 vn_reference_t tmp;
2396 tree cst;
2398 if (!vnresult)
2399 vnresult = &tmp;
2400 *vnresult = NULL;
2402 vr1.vuse = vuse_ssa_val (vuse);
2403 shared_lookup_references.truncate (0);
2404 shared_lookup_references.safe_grow (operands.length ());
2405 memcpy (shared_lookup_references.address (),
2406 operands.address (),
2407 sizeof (vn_reference_op_s)
2408 * operands.length ());
2409 vr1.operands = operands = shared_lookup_references
2410 = valueize_refs (shared_lookup_references);
2411 vr1.type = type;
2412 vr1.set = set;
2413 vr1.hashcode = vn_reference_compute_hash (&vr1);
2414 if ((cst = fully_constant_vn_reference_p (&vr1)))
2415 return cst;
2417 vn_reference_lookup_1 (&vr1, vnresult);
2418 if (!*vnresult
2419 && kind != VN_NOWALK
2420 && vr1.vuse)
2422 ao_ref r;
2423 vn_walk_kind = kind;
2424 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2425 *vnresult =
2426 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2427 vn_reference_lookup_2,
2428 vn_reference_lookup_3,
2429 vuse_ssa_val, &vr1);
2430 gcc_checking_assert (vr1.operands == shared_lookup_references);
2433 if (*vnresult)
2434 return (*vnresult)->result;
2436 return NULL_TREE;
2439 /* Lookup OP in the current hash table, and return the resulting value
2440 number if it exists in the hash table. Return NULL_TREE if it does
2441 not exist in the hash table or if the result field of the structure
2442 was NULL. VNRESULT will be filled in with the vn_reference_t
2443 stored in the hashtable if one exists. When TBAA_P is false assume
2444 we are looking up a store and treat it as having alias-set zero. */
2446 tree
2447 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2448 vn_reference_t *vnresult, bool tbaa_p)
2450 vec<vn_reference_op_s> operands;
2451 struct vn_reference_s vr1;
2452 tree cst;
2453 bool valueized_anything;
2455 if (vnresult)
2456 *vnresult = NULL;
2458 vr1.vuse = vuse_ssa_val (vuse);
2459 vr1.operands = operands
2460 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2461 vr1.type = TREE_TYPE (op);
2462 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2463 vr1.hashcode = vn_reference_compute_hash (&vr1);
2464 if ((cst = fully_constant_vn_reference_p (&vr1)))
2465 return cst;
2467 if (kind != VN_NOWALK
2468 && vr1.vuse)
2470 vn_reference_t wvnresult;
2471 ao_ref r;
2472 /* Make sure to use a valueized reference if we valueized anything.
2473 Otherwise preserve the full reference for advanced TBAA. */
2474 if (!valueized_anything
2475 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2476 vr1.operands))
2477 ao_ref_init (&r, op);
2478 if (! tbaa_p)
2479 r.ref_alias_set = r.base_alias_set = 0;
2480 vn_walk_kind = kind;
2481 wvnresult =
2482 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2483 vn_reference_lookup_2,
2484 vn_reference_lookup_3,
2485 vuse_ssa_val, &vr1);
2486 gcc_checking_assert (vr1.operands == shared_lookup_references);
2487 if (wvnresult)
2489 if (vnresult)
2490 *vnresult = wvnresult;
2491 return wvnresult->result;
2494 return NULL_TREE;
2497 return vn_reference_lookup_1 (&vr1, vnresult);
2500 /* Lookup CALL in the current hash table and return the entry in
2501 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2503 void
2504 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2505 vn_reference_t vr)
2507 if (vnresult)
2508 *vnresult = NULL;
2510 tree vuse = gimple_vuse (call);
2512 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2513 vr->operands = valueize_shared_reference_ops_from_call (call);
2514 vr->type = gimple_expr_type (call);
2515 vr->set = 0;
2516 vr->hashcode = vn_reference_compute_hash (vr);
2517 vn_reference_lookup_1 (vr, vnresult);
2520 /* Insert OP into the current hash table with a value number of
2521 RESULT, and return the resulting reference structure we created. */
2523 static vn_reference_t
2524 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2526 vn_reference_s **slot;
2527 vn_reference_t vr1;
2528 bool tem;
2530 vr1 = current_info->references_pool->allocate ();
2531 if (TREE_CODE (result) == SSA_NAME)
2532 vr1->value_id = VN_INFO (result)->value_id;
2533 else
2534 vr1->value_id = get_or_alloc_constant_value_id (result);
2535 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2536 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2537 vr1->type = TREE_TYPE (op);
2538 vr1->set = get_alias_set (op);
2539 vr1->hashcode = vn_reference_compute_hash (vr1);
2540 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2541 vr1->result_vdef = vdef;
2543 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2544 INSERT);
2546 /* Because we lookup stores using vuses, and value number failures
2547 using the vdefs (see visit_reference_op_store for how and why),
2548 it's possible that on failure we may try to insert an already
2549 inserted store. This is not wrong, there is no ssa name for a
2550 store that we could use as a differentiator anyway. Thus, unlike
2551 the other lookup functions, you cannot gcc_assert (!*slot)
2552 here. */
2554 /* But free the old slot in case of a collision. */
2555 if (*slot)
2556 free_reference (*slot);
2558 *slot = vr1;
2559 return vr1;
2562 /* Insert a reference by its pieces into the current hash table with
2563 a value number of RESULT. Return the resulting reference
2564 structure we created. */
2566 vn_reference_t
2567 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2568 vec<vn_reference_op_s> operands,
2569 tree result, unsigned int value_id)
2572 vn_reference_s **slot;
2573 vn_reference_t vr1;
2575 vr1 = current_info->references_pool->allocate ();
2576 vr1->value_id = value_id;
2577 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2578 vr1->operands = valueize_refs (operands);
2579 vr1->type = type;
2580 vr1->set = set;
2581 vr1->hashcode = vn_reference_compute_hash (vr1);
2582 if (result && TREE_CODE (result) == SSA_NAME)
2583 result = SSA_VAL (result);
2584 vr1->result = result;
2586 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2587 INSERT);
2589 /* At this point we should have all the things inserted that we have
2590 seen before, and we should never try inserting something that
2591 already exists. */
2592 gcc_assert (!*slot);
2593 if (*slot)
2594 free_reference (*slot);
2596 *slot = vr1;
2597 return vr1;
2600 /* Compute and return the hash value for nary operation VBO1. */
2602 static hashval_t
2603 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2605 inchash::hash hstate;
2606 unsigned i;
2608 for (i = 0; i < vno1->length; ++i)
2609 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2610 vno1->op[i] = SSA_VAL (vno1->op[i]);
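/* Canonicalize operand order for commutative operations and for
   comparisons so that e.g. "a + b" and "b + a", or "a < b" and
   "b > a", hash and compare equal. */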
2612 if (((vno1->length == 2
2613 && commutative_tree_code (vno1->opcode))
2614 || (vno1->length == 3
2615 && commutative_ternary_tree_code (vno1->opcode)))
2616 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2617 std::swap (vno1->op[0], vno1->op[1]);
2618 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2619 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2621 std::swap (vno1->op[0], vno1->op[1]);
2622 vno1->opcode = swap_tree_comparison (vno1->opcode);
2625 hstate.add_int (vno1->opcode);
2626 for (i = 0; i < vno1->length; ++i)
2627 inchash::add_expr (vno1->op[i], hstate);
2629 return hstate.end ();
2632 /* Compare nary operations VNO1 and VNO2 and return true if they are
2633 equivalent. */
2635 bool
2636 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2638 unsigned i;
2640 if (vno1->hashcode != vno2->hashcode)
2641 return false;
2643 if (vno1->length != vno2->length)
2644 return false;
2646 if (vno1->opcode != vno2->opcode
2647 || !types_compatible_p (vno1->type, vno2->type))
2648 return false;
2650 for (i = 0; i < vno1->length; ++i)
2651 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2652 return false;
2654 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2655 of op1. Need to check to make sure they are the same. */
2656 if (vno1->opcode == BIT_INSERT_EXPR
2657 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2658 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2659 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2660 return false;
2662 return true;
2665 /* Initialize VNO from the pieces provided. */
2667 static void
2668 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2669 enum tree_code code, tree type, tree *ops)
2671 vno->opcode = code;
2672 vno->length = length;
2673 vno->type = type;
2674 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2677 /* Initialize VNO from OP. */
2679 static void
2680 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2682 unsigned i;
2684 vno->opcode = TREE_CODE (op);
2685 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2686 vno->type = TREE_TYPE (op);
2687 for (i = 0; i < vno->length; ++i)
2688 vno->op[i] = TREE_OPERAND (op, i);
2691 /* Return the number of operands for a vn_nary ops structure from STMT. */
2693 static unsigned int
2694 vn_nary_length_from_stmt (gimple *stmt)
2696 switch (gimple_assign_rhs_code (stmt))
2698 case REALPART_EXPR:
2699 case IMAGPART_EXPR:
2700 case VIEW_CONVERT_EXPR:
2701 return 1;
2703 case BIT_FIELD_REF:
2704 return 3;
2706 case CONSTRUCTOR:
2707 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2709 default:
2710 return gimple_num_ops (stmt) - 1;
2714 /* Initialize VNO from STMT. */
2716 static void
2717 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2719 unsigned i;
2721 vno->opcode = gimple_assign_rhs_code (stmt);
2722 vno->type = gimple_expr_type (stmt);
2723 switch (vno->opcode)
2725 case REALPART_EXPR:
2726 case IMAGPART_EXPR:
2727 case VIEW_CONVERT_EXPR:
2728 vno->length = 1;
2729 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2730 break;
2732 case BIT_FIELD_REF:
2733 vno->length = 3;
2734 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2735 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2736 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2737 break;
2739 case CONSTRUCTOR:
2740 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2741 for (i = 0; i < vno->length; ++i)
2742 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2743 break;
2745 default:
2746 gcc_checking_assert (!gimple_assign_single_p (stmt));
2747 vno->length = gimple_num_ops (stmt) - 1;
2748 for (i = 0; i < vno->length; ++i)
2749 vno->op[i] = gimple_op (stmt, i + 1);
2753 /* Compute the hashcode for VNO and look for it in the hash table;
2754 return the resulting value number if it exists in the hash table.
2755 Return NULL_TREE if it does not exist in the hash table or if the
2756 result field of the operation is NULL. VNRESULT will contain the
2757 vn_nary_op_t from the hashtable if it exists. */
2759 static tree
2760 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2762 vn_nary_op_s **slot;
2764 if (vnresult)
2765 *vnresult = NULL;
2767 vno->hashcode = vn_nary_op_compute_hash (vno);
2768 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2769 NO_INSERT);
2770 if (!slot && current_info == optimistic_info)
2771 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2772 NO_INSERT);
2773 if (!slot)
2774 return NULL_TREE;
2775 if (vnresult)
2776 *vnresult = *slot;
2777 return (*slot)->result;
2780 /* Lookup an n-ary operation by its pieces and return the resulting value
2781 number if it exists in the hash table. Return NULL_TREE if it does
2782 not exist in the hash table or if the result field of the operation
2783 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2784 if it exists. */
2786 tree
2787 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2788 tree type, tree *ops, vn_nary_op_t *vnresult)
2790 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2791 sizeof_vn_nary_op (length));
2792 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2793 return vn_nary_op_lookup_1 (vno1, vnresult);
2796 /* Lookup OP in the current hash table, and return the resulting value
2797 number if it exists in the hash table. Return NULL_TREE if it does
2798 not exist in the hash table or if the result field of the operation
2799 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2800 if it exists. */
2802 tree
2803 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2805 vn_nary_op_t vno1
2806 = XALLOCAVAR (struct vn_nary_op_s,
2807 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2808 init_vn_nary_op_from_op (vno1, op);
2809 return vn_nary_op_lookup_1 (vno1, vnresult);
2812 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2813 value number if it exists in the hash table. Return NULL_TREE if
2814 it does not exist in the hash table. VNRESULT will contain the
2815 vn_nary_op_t from the hashtable if it exists. */
2817 tree
2818 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2820 vn_nary_op_t vno1
2821 = XALLOCAVAR (struct vn_nary_op_s,
2822 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2823 init_vn_nary_op_from_stmt (vno1, stmt);
2824 return vn_nary_op_lookup_1 (vno1, vnresult);
2827 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2829 static vn_nary_op_t
2830 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2832 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2835 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2836 obstack. */
2838 static vn_nary_op_t
2839 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2841 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2842 &current_info->nary_obstack);
2844 vno1->value_id = value_id;
2845 vno1->length = length;
2846 vno1->result = result;
2848 return vno1;
2851 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2852 VNO->HASHCODE first. */
2854 static vn_nary_op_t
2855 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2856 bool compute_hash)
2858 vn_nary_op_s **slot;
2860 if (compute_hash)
2861 vno->hashcode = vn_nary_op_compute_hash (vno);
2863 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2864 /* While we do not want to insert things twice it's awkward to
2865 avoid it in the case where visit_nary_op pattern-matches stuff
2866 and ends up simplifying the replacement to itself. We then
2867 get two inserts, one from visit_nary_op and one from
2868 vn_nary_build_or_lookup.
2869 So allow inserts with the same value number. */
2870 if (*slot && (*slot)->result == vno->result)
2871 return *slot;
2873 gcc_assert (!*slot);
2875 *slot = vno;
2876 return vno;
2879 /* Insert an n-ary operation into the current hash table using its
2880 pieces. Return the vn_nary_op_t structure we created and put in
2881 the hashtable. */
2883 vn_nary_op_t
2884 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2885 tree type, tree *ops,
2886 tree result, unsigned int value_id)
2888 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2889 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2890 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2893 /* Insert OP into the current hash table with a value number of
2894 RESULT. Return the vn_nary_op_t structure we created and put in
2895 the hashtable. */
2897 vn_nary_op_t
2898 vn_nary_op_insert (tree op, tree result)
2900 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2901 vn_nary_op_t vno1;
2903 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2904 init_vn_nary_op_from_op (vno1, op);
2905 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2908 /* Insert the rhs of STMT into the current hash table with a value number of
2909 RESULT. */
2911 static vn_nary_op_t
2912 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2914 vn_nary_op_t vno1
2915 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2916 result, VN_INFO (result)->value_id);
2917 init_vn_nary_op_from_stmt (vno1, stmt);
2918 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2921 /* Compute a hashcode for PHI operation VP1 and return it. */
2923 static inline hashval_t
2924 vn_phi_compute_hash (vn_phi_t vp1)
2926 inchash::hash hstate (vp1->phiargs.length () > 2
2927 ? vp1->block->index : vp1->phiargs.length ());
2928 tree phi1op;
2929 tree type;
2930 edge e;
2931 edge_iterator ei;
2933 /* If all PHI arguments are constants we need to distinguish
2934 the PHI node via its type. */
2935 type = vp1->type;
2936 hstate.merge_hash (vn_hash_type (type));
2938 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2940 /* Don't hash backedge values; they need to be handled as VN_TOP
2941 for optimistic value-numbering. */
2942 if (e->flags & EDGE_DFS_BACK)
2943 continue;
2945 phi1op = vp1->phiargs[e->dest_idx];
2946 if (phi1op == VN_TOP)
2947 continue;
2948 inchash::add_expr (phi1op, hstate);
2951 return hstate.end ();
2955 /* Return true if COND1 and COND2 represent the same condition, set
2956 *INVERTED_P if one needs to be inverted to make it the same as
2957 the other. */
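/* For example "a < b" and "b > a" are the same condition, while
   "a < b" and "a >= b" (when NaNs are not a concern) are inverses
   of each other. */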
2959 static bool
2960 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2961 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2963 enum tree_code code1 = gimple_cond_code (cond1);
2964 enum tree_code code2 = gimple_cond_code (cond2);
2966 *inverted_p = false;
2967 if (code1 == code2)
2969 else if (code1 == swap_tree_comparison (code2))
2970 std::swap (lhs2, rhs2);
2971 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2972 *inverted_p = true;
2973 else if (code1 == invert_tree_comparison
2974 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2976 std::swap (lhs2, rhs2);
2977 *inverted_p = true;
2979 else
2980 return false;
2982 return ((expressions_equal_p (lhs1, lhs2)
2983 && expressions_equal_p (rhs1, rhs2))
2984 || (commutative_tree_code (code1)
2985 && expressions_equal_p (lhs1, rhs2)
2986 && expressions_equal_p (rhs1, lhs2)));
2989 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2991 static int
2992 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2994 if (vp1->hashcode != vp2->hashcode)
2995 return false;
2997 if (vp1->block != vp2->block)
2999 if (vp1->phiargs.length () != vp2->phiargs.length ())
3000 return false;
3002 switch (vp1->phiargs.length ())
3004 case 1:
3005 /* Single-arg PHIs are just copies. */
3006 break;
3008 case 2:
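/* Two-argument PHIs in different blocks can still be equal when they
   are controlled by equivalent conditions and receive equal values
   on the corresponding true and false edges. */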
3010 /* Rule out backedges into the PHI. */
3011 if (vp1->block->loop_father->header == vp1->block
3012 || vp2->block->loop_father->header == vp2->block)
3013 return false;
3015 /* If the PHI nodes do not have compatible types
3016 they are not the same. */
3017 if (!types_compatible_p (vp1->type, vp2->type))
3018 return false;
3020 basic_block idom1
3021 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3022 basic_block idom2
3023 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3024 /* If the immediate dominators end in switch stmts, multiple
3025 values may end up in the same PHI arg via intermediate
3026 CFG merges. */
3027 if (EDGE_COUNT (idom1->succs) != 2
3028 || EDGE_COUNT (idom2->succs) != 2)
3029 return false;
3031 /* Verify the controlling stmt is the same. */
3032 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3033 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3034 if (! last1 || ! last2)
3035 return false;
3036 bool inverted_p;
3037 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3038 last2, vp2->cclhs, vp2->ccrhs,
3039 &inverted_p))
3040 return false;
3042 /* Get at true/false controlled edges into the PHI. */
3043 edge te1, te2, fe1, fe2;
3044 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3045 &te1, &fe1)
3046 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3047 &te2, &fe2))
3048 return false;
3050 /* Swap edges if the second condition is the inverted of the
3051 first. */
3052 if (inverted_p)
3053 std::swap (te2, fe2);
3055 /* ??? Handle VN_TOP specially. */
3056 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3057 vp2->phiargs[te2->dest_idx])
3058 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3059 vp2->phiargs[fe2->dest_idx]))
3060 return false;
3062 return true;
3065 default:
3066 return false;
3070 /* If the PHI nodes do not have compatible types
3071 they are not the same. */
3072 if (!types_compatible_p (vp1->type, vp2->type))
3073 return false;
3075 /* Any phi in the same block will have its arguments in the
3076 same edge order, because of how we store phi nodes. */
3077 int i;
3078 tree phi1op;
3079 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3081 tree phi2op = vp2->phiargs[i];
3082 if (phi1op == VN_TOP || phi2op == VN_TOP)
3083 continue;
3084 if (!expressions_equal_p (phi1op, phi2op))
3085 return false;
3088 return true;
3091 static vec<tree> shared_lookup_phiargs;
3093 /* Lookup PHI in the current hash table, and return the resulting
3094 value number if it exists in the hash table. Return NULL_TREE if
3095 it does not exist in the hash table. */
3097 static tree
3098 vn_phi_lookup (gimple *phi)
3100 vn_phi_s **slot;
3101 struct vn_phi_s vp1;
3102 edge e;
3103 edge_iterator ei;
3105 shared_lookup_phiargs.truncate (0);
3106 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3108 /* Canonicalize the SSA_NAME's to their value number. */
3109 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3111 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3112 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3113 shared_lookup_phiargs[e->dest_idx] = def;
3115 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3116 vp1.phiargs = shared_lookup_phiargs;
3117 vp1.block = gimple_bb (phi);
3118 /* Extract values of the controlling condition. */
3119 vp1.cclhs = NULL_TREE;
3120 vp1.ccrhs = NULL_TREE;
3121 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3122 if (EDGE_COUNT (idom1->succs) == 2)
3123 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3125 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3126 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3128 vp1.hashcode = vn_phi_compute_hash (&vp1);
3129 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3130 NO_INSERT);
3131 if (!slot && current_info == optimistic_info)
3132 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3133 NO_INSERT);
3134 if (!slot)
3135 return NULL_TREE;
3136 return (*slot)->result;
3139 /* Insert PHI into the current hash table with a value number of
3140 RESULT. */
3142 static vn_phi_t
3143 vn_phi_insert (gimple *phi, tree result)
3145 vn_phi_s **slot;
3146 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3147 vec<tree> args = vNULL;
3148 edge e;
3149 edge_iterator ei;
3151 args.safe_grow (gimple_phi_num_args (phi));
3153 /* Canonicalize the SSA_NAME's to their value number. */
3154 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3156 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3157 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3158 args[e->dest_idx] = def;
3160 vp1->value_id = VN_INFO (result)->value_id;
3161 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3162 vp1->phiargs = args;
3163 vp1->block = gimple_bb (phi);
3164 /* Extract values of the controlling condition. */
3165 vp1->cclhs = NULL_TREE;
3166 vp1->ccrhs = NULL_TREE;
3167 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3168 if (EDGE_COUNT (idom1->succs) == 2)
3169 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3171 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3172 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3174 vp1->result = result;
3175 vp1->hashcode = vn_phi_compute_hash (vp1);
3177 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3179 /* Because we iterate over phi operations more than once, it's
3180 possible the slot might already exist here, hence no assert. */
3181 *slot = vp1;
3182 return vp1;
3186 /* Print set of components in strongly connected component SCC to OUT. */
3188 static void
3189 print_scc (FILE *out, vec<tree> scc)
3191 tree var;
3192 unsigned int i;
3194 fprintf (out, "SCC consists of %u:", scc.length ());
3195 FOR_EACH_VEC_ELT (scc, i, var)
3197 fprintf (out, " ");
3198 print_generic_expr (out, var);
3200 fprintf (out, "\n");
3203 /* Return true if BB1 is dominated by BB2 taking into account edges
3204 that are not executable. */
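/* Ignoring not-executable edges, BB2 may dominate BB1 even when it
   does not in the full CFG; approximate this by stepping BB1 to its
   single executable predecessor and BB2 to its single executable
   successor and re-doing the dominance query. */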
3206 static bool
3207 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3209 edge_iterator ei;
3210 edge e;
3212 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3213 return true;
3215 /* Before iterating we'd like to know if there exists a
3216 (executable) path from bb2 to bb1 at all, if not we can
3217 directly return false. For now simply iterate once. */
3219 /* Iterate to the single executable bb1 predecessor. */
3220 if (EDGE_COUNT (bb1->preds) > 1)
3222 edge prede = NULL;
3223 FOR_EACH_EDGE (e, ei, bb1->preds)
3224 if (e->flags & EDGE_EXECUTABLE)
3226 if (prede)
3228 prede = NULL;
3229 break;
3231 prede = e;
3233 if (prede)
3235 bb1 = prede->src;
3237 /* Re-do the dominance check with changed bb1. */
3238 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3239 return true;
3243 /* Iterate to the single executable bb2 successor. */
3244 edge succe = NULL;
3245 FOR_EACH_EDGE (e, ei, bb2->succs)
3246 if (e->flags & EDGE_EXECUTABLE)
3248 if (succe)
3250 succe = NULL;
3251 break;
3253 succe = e;
3255 if (succe)
3257 /* Verify the reached block is only reached through succe.
3258 If there is only one edge we can spare us the dominator
3259 check and iterate directly. */
3260 if (EDGE_COUNT (succe->dest->preds) > 1)
3262 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3263 if (e != succe
3264 && (e->flags & EDGE_EXECUTABLE))
3266 succe = NULL;
3267 break;
3270 if (succe)
3272 bb2 = succe->dest;
3274 /* Re-do the dominance check with changed bb2. */
3275 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3276 return true;
3280 /* We could now iterate updating bb1 / bb2. */
3281 return false;
3284 /* Set the value number of FROM to TO, return true if it has changed
3285 as a result. */
3287 static inline bool
3288 set_ssa_val_to (tree from, tree to)
3290 tree currval = SSA_VAL (from);
3291 HOST_WIDE_INT toff, coff;
3293 /* The only thing we allow as value numbers are ssa_names
3294 and invariants. So assert that here. We don't allow VN_TOP
3295 as visiting a stmt should produce a value-number other than
3296 that.
3297 ??? Still VN_TOP can happen for unreachable code, so force
3298 it to varying in that case. Not all code is prepared to
3299 get VN_TOP on valueization. */
3300 if (to == VN_TOP)
3302 if (dump_file && (dump_flags & TDF_DETAILS))
3303 fprintf (dump_file, "Forcing value number to varying on "
3304 "receiving VN_TOP\n");
3305 to = from;
3308 gcc_assert (to != NULL_TREE
3309 && ((TREE_CODE (to) == SSA_NAME
3310 && (to == from || SSA_VAL (to) == to))
3311 || is_gimple_min_invariant (to)));
3313 if (from != to)
3315 if (currval == from)
3317 if (dump_file && (dump_flags & TDF_DETAILS))
3319 fprintf (dump_file, "Not changing value number of ");
3320 print_generic_expr (dump_file, from);
3321 fprintf (dump_file, " from VARYING to ");
3322 print_generic_expr (dump_file, to);
3323 fprintf (dump_file, "\n");
3325 return false;
3327 else if (currval != VN_TOP
3328 && ! is_gimple_min_invariant (currval)
3329 && is_gimple_min_invariant (to))
3331 if (dump_file && (dump_flags & TDF_DETAILS))
3333 fprintf (dump_file, "Forcing VARYING instead of changing "
3334 "value number of ");
3335 print_generic_expr (dump_file, from);
3336 fprintf (dump_file, " from ");
3337 print_generic_expr (dump_file, currval);
3338 fprintf (dump_file, " (non-constant) to ");
3339 print_generic_expr (dump_file, to);
3340 fprintf (dump_file, " (constant)\n");
3342 to = from;
3344 else if (TREE_CODE (to) == SSA_NAME
3345 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3346 to = from;
3349 if (dump_file && (dump_flags & TDF_DETAILS))
3351 fprintf (dump_file, "Setting value number of ");
3352 print_generic_expr (dump_file, from);
3353 fprintf (dump_file, " to ");
3354 print_generic_expr (dump_file, to);
3357 if (currval != to
3358 && !operand_equal_p (currval, to, 0)
3359 /* Different undefined SSA names are not actually different. See
3360 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3361 && !(TREE_CODE (currval) == SSA_NAME
3362 && TREE_CODE (to) == SSA_NAME
3363 && ssa_undefined_value_p (currval, false)
3364 && ssa_undefined_value_p (to, false))
3365 /* ??? For addresses involving volatile objects or types operand_equal_p
3366 does not reliably detect ADDR_EXPRs as equal. We know we are only
3367 getting invariant gimple addresses here, so can use
3368 get_addr_base_and_unit_offset to do this comparison. */
3369 && !(TREE_CODE (currval) == ADDR_EXPR
3370 && TREE_CODE (to) == ADDR_EXPR
3371 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3372 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3373 && coff == toff))
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3376 fprintf (dump_file, " (changed)\n");
3378 /* If we equate two SSA names we have to make the side-band info
3379 of the leader conservative (and remember whatever original value
3380 was present). */
3381 if (TREE_CODE (to) == SSA_NAME)
3383 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3384 && SSA_NAME_RANGE_INFO (to))
3386 if (SSA_NAME_IS_DEFAULT_DEF (to)
3387 || dominated_by_p_w_unex
3388 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3389 gimple_bb (SSA_NAME_DEF_STMT (to))))
3390 /* Keep the info from the dominator. */
3392 else
3394 /* Save old info. */
3395 if (! VN_INFO (to)->info.range_info)
3397 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3398 VN_INFO (to)->range_info_anti_range_p
3399 = SSA_NAME_ANTI_RANGE_P (to);
3401 /* Rather than allocating memory and unioning the info
3402 just clear it. */
3403 if (dump_file && (dump_flags & TDF_DETAILS))
3405 fprintf (dump_file, "clearing range info of ");
3406 print_generic_expr (dump_file, to);
3407 fprintf (dump_file, "\n");
3409 SSA_NAME_RANGE_INFO (to) = NULL;
3412 else if (POINTER_TYPE_P (TREE_TYPE (to))
3413 && SSA_NAME_PTR_INFO (to))
3415 if (SSA_NAME_IS_DEFAULT_DEF (to)
3416 || dominated_by_p_w_unex
3417 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3418 gimple_bb (SSA_NAME_DEF_STMT (to))))
3419 /* Keep the info from the dominator. */
3421 else if (! SSA_NAME_PTR_INFO (from)
3422 /* Handle the case of trivially equivalent info. */
3423 || memcmp (SSA_NAME_PTR_INFO (to),
3424 SSA_NAME_PTR_INFO (from),
3425 sizeof (ptr_info_def)) != 0)
3427 /* Save old info. */
3428 if (! VN_INFO (to)->info.ptr_info)
3429 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3430 /* Rather than allocating memory and unioning the info
3431 just clear it. */
3432 if (dump_file && (dump_flags & TDF_DETAILS))
3434 fprintf (dump_file, "clearing points-to info of ");
3435 print_generic_expr (dump_file, to);
3436 fprintf (dump_file, "\n");
3438 SSA_NAME_PTR_INFO (to) = NULL;
3443 VN_INFO (from)->valnum = to;
3444 return true;
3446 if (dump_file && (dump_flags & TDF_DETAILS))
3447 fprintf (dump_file, "\n");
3448 return false;
3451 /* Mark as processed all the definitions in the defining stmt of USE, or
3452 the USE itself. */
3454 static void
3455 mark_use_processed (tree use)
3457 ssa_op_iter iter;
3458 def_operand_p defp;
3459 gimple *stmt = SSA_NAME_DEF_STMT (use);
3461 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3463 VN_INFO (use)->use_processed = true;
3464 return;
3467 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3469 tree def = DEF_FROM_PTR (defp);
3471 VN_INFO (def)->use_processed = true;
3475 /* Set all definitions in STMT to value number to themselves.
3476 Return true if a value number changed. */
3478 static bool
3479 defs_to_varying (gimple *stmt)
3481 bool changed = false;
3482 ssa_op_iter iter;
3483 def_operand_p defp;
3485 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3487 tree def = DEF_FROM_PTR (defp);
3488 changed |= set_ssa_val_to (def, def);
3490 return changed;
3493 /* Visit a copy between LHS and RHS, return true if the value number
3494 changed. */
3496 static bool
3497 visit_copy (tree lhs, tree rhs)
3499 /* Valueize. */
3500 rhs = SSA_VAL (rhs);
3502 return set_ssa_val_to (lhs, rhs);
3505 /* Lookup a value of type WIDE_TYPE whose value, when truncated to the
3506 type of OP, is the same as OP. */
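/* E.g. with OP of type unsigned short and WIDE_TYPE int this returns
   an int value whose truncation back to unsigned short is OP: either
   an already available widening of OP, the source of a truncation
   that defined OP, or a widened constant. */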
3508 static tree
3509 valueized_wider_op (tree wide_type, tree op)
3511 if (TREE_CODE (op) == SSA_NAME)
3512 op = SSA_VAL (op);
3514 /* Either we have the op widened available. */
3515 tree ops[3] = {};
3516 ops[0] = op;
3517 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3518 wide_type, ops, NULL);
3519 if (tem)
3520 return tem;
3522 /* Or the op is truncated from some existing value. */
3523 if (TREE_CODE (op) == SSA_NAME)
3525 gimple *def = SSA_NAME_DEF_STMT (op);
3526 if (is_gimple_assign (def)
3527 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3529 tem = gimple_assign_rhs1 (def);
3530 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3532 if (TREE_CODE (tem) == SSA_NAME)
3533 tem = SSA_VAL (tem);
3534 return tem;
3539 /* For constants simply extend it. */
3540 if (TREE_CODE (op) == INTEGER_CST)
3541 return wide_int_to_tree (wide_type, wi::to_wide (op));
3543 return NULL_TREE;
3546 /* Visit a nary operator RHS, value number it, and return true if the
3547 value number of LHS has changed as a result. */
3549 static bool
3550 visit_nary_op (tree lhs, gassign *stmt)
3552 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3553 if (result)
3554 return set_ssa_val_to (lhs, result);
3556 /* Do some special pattern matching for redundancies of operations
3557 in different types. */
3558 enum tree_code code = gimple_assign_rhs_code (stmt);
3559 tree type = TREE_TYPE (lhs);
3560 tree rhs1 = gimple_assign_rhs1 (stmt);
3561 switch (code)
3563 CASE_CONVERT:
3564 /* Match arithmetic done in a different type where we can easily
3565 substitute the result from some earlier sign-changed or widened
3566 operation. */
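/* E.g. when tem = a + b is computed in unsigned short and
   lhs = (int) tem, and the addition of the widened operands is
   already value numbered in int, lhs can be valueized to that value
   masked with 0xffff; for a pure sign-change no mask is needed. */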
3567 if (INTEGRAL_TYPE_P (type)
3568 && TREE_CODE (rhs1) == SSA_NAME
3569 /* We only handle sign-changes or zero-extension -> & mask. */
3570 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3571 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3572 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3574 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3575 if (def
3576 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3577 || gimple_assign_rhs_code (def) == MINUS_EXPR
3578 || gimple_assign_rhs_code (def) == MULT_EXPR))
3580 tree ops[3] = {};
3581 /* Either we have the op widened available. */
3582 ops[0] = valueized_wider_op (type,
3583 gimple_assign_rhs1 (def));
3584 if (ops[0])
3585 ops[1] = valueized_wider_op (type,
3586 gimple_assign_rhs2 (def));
3587 if (ops[0] && ops[1])
3589 ops[0] = vn_nary_op_lookup_pieces
3590 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3591 /* We have wider operation available. */
3592 if (ops[0])
3594 unsigned lhs_prec = TYPE_PRECISION (type);
3595 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3596 if (lhs_prec == rhs_prec)
3598 ops[1] = NULL_TREE;
3599 result = vn_nary_build_or_lookup (NOP_EXPR,
3600 type, ops);
3601 if (result)
3603 bool changed = set_ssa_val_to (lhs, result);
3604 vn_nary_op_insert_stmt (stmt, result);
3605 return changed;
3608 else
3610 ops[1] = wide_int_to_tree (type,
3611 wi::mask (rhs_prec, false,
3612 lhs_prec));
3613 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3614 TREE_TYPE (lhs),
3615 ops);
3616 if (result)
3618 bool changed = set_ssa_val_to (lhs, result);
3619 vn_nary_op_insert_stmt (stmt, result);
3620 return changed;
3627 default:;
3630 bool changed = set_ssa_val_to (lhs, lhs);
3631 vn_nary_op_insert_stmt (stmt, lhs);
3632 return changed;
3635 /* Visit a call STMT storing into LHS. Return true if the value number
3636 of the LHS has changed as a result. */
3638 static bool
3639 visit_reference_op_call (tree lhs, gcall *stmt)
3641 bool changed = false;
3642 struct vn_reference_s vr1;
3643 vn_reference_t vnresult = NULL;
3644 tree vdef = gimple_vdef (stmt);
3646 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3647 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3648 lhs = NULL_TREE;
3650 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3651 if (vnresult)
3653 if (vnresult->result_vdef && vdef)
3654 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3655 else if (vdef)
3656 /* If the call was discovered to be pure or const reflect
3657 that as far as possible. */
3658 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3660 if (!vnresult->result && lhs)
3661 vnresult->result = lhs;
3663 if (vnresult->result && lhs)
3664 changed |= set_ssa_val_to (lhs, vnresult->result);
3666 else
3668 vn_reference_t vr2;
3669 vn_reference_s **slot;
3670 tree vdef_val = vdef;
3671 if (vdef)
3673 /* If we value numbered an indirect call's function to one
3674 not clobbering memory, value number its VDEF to its
3675 VUSE. */
3676 tree fn = gimple_call_fn (stmt);
3677 if (fn && TREE_CODE (fn) == SSA_NAME)
3679 fn = SSA_VAL (fn);
3680 if (TREE_CODE (fn) == ADDR_EXPR
3681 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3682 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3683 & (ECF_CONST | ECF_PURE)))
3684 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3686 changed |= set_ssa_val_to (vdef, vdef_val);
3688 if (lhs)
3689 changed |= set_ssa_val_to (lhs, lhs);
3690 vr2 = current_info->references_pool->allocate ();
3691 vr2->vuse = vr1.vuse;
3692 /* As we are not walking the virtual operand chain we know the
3693 shared_lookup_references are still original so we can re-use
3694 them here. */
3695 vr2->operands = vr1.operands.copy ();
3696 vr2->type = vr1.type;
3697 vr2->set = vr1.set;
3698 vr2->hashcode = vr1.hashcode;
3699 vr2->result = lhs;
3700 vr2->result_vdef = vdef_val;
3701 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3702 INSERT);
3703 gcc_assert (!*slot);
3704 *slot = vr2;
3707 return changed;
3710 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3711 and return true if the value number of the LHS has changed as a result. */
3713 static bool
3714 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3716 bool changed = false;
3717 tree last_vuse;
3718 tree result;
3720 last_vuse = gimple_vuse (stmt);
3721 last_vuse_ptr = &last_vuse;
3722 result = vn_reference_lookup (op, gimple_vuse (stmt),
3723 default_vn_walk_kind, NULL, true);
3724 last_vuse_ptr = NULL;
3726 /* We handle type-punning through unions by value-numbering based
3727 on offset and size of the access. Be prepared to handle a
3728 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3729 if (result
3730 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3732 /* We will be setting the value number of lhs to the value number
3733 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3734 So first simplify and lookup this expression to see if it
3735 is already available. */
3736 code_helper rcode = VIEW_CONVERT_EXPR;
3737 tree ops[3] = { result };
3738 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3741 if (result)
3742 changed = set_ssa_val_to (lhs, result);
3743 else
3745 changed = set_ssa_val_to (lhs, lhs);
3746 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3749 return changed;
3753 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3754 and return true if the value number of the LHS has changed as a result. */
3756 static bool
3757 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3759 bool changed = false;
3760 vn_reference_t vnresult = NULL;
3761 tree assign;
3762 bool resultsame = false;
3763 tree vuse = gimple_vuse (stmt);
3764 tree vdef = gimple_vdef (stmt);
3766 if (TREE_CODE (op) == SSA_NAME)
3767 op = SSA_VAL (op);
3769 /* First we want to lookup using the *vuses* from the store and see
3770 if there the last store to this location with the same address
3771 had the same value.
3773 The vuses represent the memory state before the store. If the
3774 memory state, address, and value of the store is the same as the
3775 last store to this location, then this store will produce the
3776 same memory state as that store.
3778 In this case the vdef versions for this store are value numbered to those
3779 vuse versions, since they represent the same memory state after
3780 this store.
3782 Otherwise, the vdefs for the store are used when inserting into
3783 the table, since the store generates a new memory state. */
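/* For example a second "*p = x;" with no intervening may-def does
   not change the memory state, so its VDEF gets the value number of
   its VUSE. */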
3785 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3786 if (vnresult
3787 && vnresult->result)
3789 tree result = vnresult->result;
3790 if (TREE_CODE (result) == SSA_NAME)
3791 result = SSA_VAL (result);
3792 resultsame = expressions_equal_p (result, op);
3793 if (resultsame)
3795 /* If the TBAA state isn't compatible for downstream reads
3796 we cannot value-number the VDEFs the same. */
3797 alias_set_type set = get_alias_set (lhs);
3798 if (vnresult->set != set
3799 && ! alias_set_subset_of (set, vnresult->set))
3800 resultsame = false;
3804 if (!resultsame)
3806 /* Only perform the following when being called from PRE
3807 which embeds tail merging. */
3808 if (default_vn_walk_kind == VN_WALK)
3810 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3811 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3812 if (vnresult)
3814 VN_INFO (vdef)->use_processed = true;
3815 return set_ssa_val_to (vdef, vnresult->result_vdef);
3819 if (dump_file && (dump_flags & TDF_DETAILS))
3821 fprintf (dump_file, "No store match\n");
3822 fprintf (dump_file, "Value numbering store ");
3823 print_generic_expr (dump_file, lhs);
3824 fprintf (dump_file, " to ");
3825 print_generic_expr (dump_file, op);
3826 fprintf (dump_file, "\n");
3828 /* Have to set value numbers before insert, since insert is
3829 going to valueize the references in-place. */
3830 if (vdef)
3831 changed |= set_ssa_val_to (vdef, vdef);
3833 /* Do not insert structure copies into the tables. */
3834 if (is_gimple_min_invariant (op)
3835 || is_gimple_reg (op))
3836 vn_reference_insert (lhs, op, vdef, NULL);
3838 /* Only perform the following when being called from PRE
3839 which embeds tail merging. */
3840 if (default_vn_walk_kind == VN_WALK)
3842 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3843 vn_reference_insert (assign, lhs, vuse, vdef);
3846 else
3848 /* We had a match, so value number the vdef to have the value
3849 number of the vuse it came from. */
3851 if (dump_file && (dump_flags & TDF_DETAILS))
3852 fprintf (dump_file, "Store matched earlier value, "
3853 "value numbering store vdefs to matching vuses.\n");
3855 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3858 return changed;
3861 /* Visit and value number PHI, return true if the value number
3862 changed. */
3864 static bool
3865 visit_phi (gimple *phi)
3867 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3868 unsigned n_executable = 0;
3869 bool allsame = true;
3870 edge_iterator ei;
3871 edge e;
3873 /* TODO: We could check for this in init_sccvn, and replace this
3874 with a gcc_assert. */
3875 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3876 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3878 /* See if all non-TOP arguments have the same value. TOP is
3879 equivalent to everything, so we can ignore it. */
3880 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3881 if (e->flags & EDGE_EXECUTABLE)
3883 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3885 ++n_executable;
3886 if (TREE_CODE (def) == SSA_NAME)
3887 def = SSA_VAL (def);
3888 if (def == VN_TOP)
3890 /* Ignore undefined defs for sameval but record one. */
3891 else if (TREE_CODE (def) == SSA_NAME
3892 && ssa_undefined_value_p (def, false))
3893 seen_undef = def;
3894 else if (sameval == VN_TOP)
3895 sameval = def;
3896 else if (!expressions_equal_p (def, sameval))
3898 allsame = false;
3899 break;
3904 /* If none of the edges was executable keep the value-number at VN_TOP,
3905 if only a single edge is executable use its value. */
3906 if (n_executable <= 1)
3907 result = seen_undef ? seen_undef : sameval;
3908 /* If we saw only undefined values and VN_TOP use one of the
3909 undefined values. */
3910 else if (sameval == VN_TOP)
3911 result = seen_undef ? seen_undef : sameval;
3912 /* First see if it is equivalent to a phi node in this block. We prefer
3913 this as it allows IV elimination - see PRs 66502 and 67167. */
3914 else if ((result = vn_phi_lookup (phi)))
3916 /* If all values are the same use that, unless we've seen undefined
3917 values as well and the value isn't constant.
3918 CCP/copyprop have the same restriction to not remove uninit warnings. */
3919 else if (allsame
3920 && (! seen_undef || is_gimple_min_invariant (sameval)))
3921 result = sameval;
3922 else
3924 result = PHI_RESULT (phi);
3925 /* Only insert PHIs that are varying, for constant value numbers
3926 we mess up equivalences otherwise as we are only comparing
3927 the immediate controlling predicates. */
3928 vn_phi_insert (phi, result);
3931 return set_ssa_val_to (PHI_RESULT (phi), result);
3934 /* Try to simplify RHS using equivalences and constant folding. */
3936 static tree
3937 try_to_simplify (gassign *stmt)
3939 enum tree_code code = gimple_assign_rhs_code (stmt);
3940 tree tem;
3942 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3943 in this case, there is no point in doing extra work. */
3944 if (code == SSA_NAME)
3945 return NULL_TREE;
3947 /* First try constant folding based on our current lattice. */
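/* A note on the hook below (my reading of the code, not authoritative):
   mprts_hook points the gimple folder at the VN tables so values of
   intermediate simplification results can be looked up instead of new
   temporaries being created, and the counter drops the hook after too
   many invocations to avoid endless re-simplification.  */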
3948 mprts_hook = vn_lookup_simplify_result;
3949 mprts_hook_cnt = 9;
3950 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3951 mprts_hook = NULL;
3952 if (tem
3953 && (TREE_CODE (tem) == SSA_NAME
3954 || is_gimple_min_invariant (tem)))
3955 return tem;
3957 return NULL_TREE;
3960 /* Visit and value number USE, return true if the value number
3961 changed. */
3963 static bool
3964 visit_use (tree use)
3966 bool changed = false;
3967 gimple *stmt = SSA_NAME_DEF_STMT (use);
3969 mark_use_processed (use);
3971 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
3972 && !SSA_NAME_IS_DEFAULT_DEF (use));
3974 if (dump_file && (dump_flags & TDF_DETAILS))
3976 fprintf (dump_file, "Value numbering ");
3977 print_generic_expr (dump_file, use);
3978 fprintf (dump_file, " stmt = ");
3979 print_gimple_stmt (dump_file, stmt, 0);
3982 if (gimple_code (stmt) == GIMPLE_PHI)
3983 changed = visit_phi (stmt);
3984 else if (gimple_has_volatile_ops (stmt))
3985 changed = defs_to_varying (stmt);
3986 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3988 enum tree_code code = gimple_assign_rhs_code (ass);
3989 tree lhs = gimple_assign_lhs (ass);
3990 tree rhs1 = gimple_assign_rhs1 (ass);
3991 tree simplified;
3993 /* Shortcut for copies. Simplifying copies is pointless,
3994 since we copy the expression and value they represent. */
3995 if (code == SSA_NAME
3996 && TREE_CODE (lhs) == SSA_NAME)
3998 changed = visit_copy (lhs, rhs1);
3999 goto done;
4001 simplified = try_to_simplify (ass);
4002 if (simplified)
4004 if (dump_file && (dump_flags & TDF_DETAILS))
4006 fprintf (dump_file, "RHS ");
4007 print_gimple_expr (dump_file, ass, 0);
4008 fprintf (dump_file, " simplified to ");
4009 print_generic_expr (dump_file, simplified);
4010 fprintf (dump_file, "\n");
4013 /* Setting value numbers to constants will occasionally
4014 screw up phi congruence because constants are not
4015 uniquely associated with a single ssa name that can be
4016 looked up. */
4017 if (simplified
4018 && is_gimple_min_invariant (simplified)
4019 && TREE_CODE (lhs) == SSA_NAME)
4021 changed = set_ssa_val_to (lhs, simplified);
4022 goto done;
4024 else if (simplified
4025 && TREE_CODE (simplified) == SSA_NAME
4026 && TREE_CODE (lhs) == SSA_NAME)
4028 changed = visit_copy (lhs, simplified);
4029 goto done;
4032 if ((TREE_CODE (lhs) == SSA_NAME
4033 /* We can substitute SSA_NAMEs that are live over
4034 abnormal edges with their constant value. */
4035 && !(gimple_assign_copy_p (ass)
4036 && is_gimple_min_invariant (rhs1))
4037 && !(simplified
4038 && is_gimple_min_invariant (simplified))
4039 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4040 /* Stores or copies from SSA_NAMEs that are live over
4041 abnormal edges are a problem. */
4042 || (code == SSA_NAME
4043 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4044 changed = defs_to_varying (ass);
4045 else if (REFERENCE_CLASS_P (lhs)
4046 || DECL_P (lhs))
4047 changed = visit_reference_op_store (lhs, rhs1, ass);
4048 else if (TREE_CODE (lhs) == SSA_NAME)
4050 if ((gimple_assign_copy_p (ass)
4051 && is_gimple_min_invariant (rhs1))
4052 || (simplified
4053 && is_gimple_min_invariant (simplified)))
4055 if (simplified)
4056 changed = set_ssa_val_to (lhs, simplified);
4057 else
4058 changed = set_ssa_val_to (lhs, rhs1);
4060 else
4062 /* Visit the original statement. */
4063 switch (vn_get_stmt_kind (ass))
4065 case VN_NARY:
4066 changed = visit_nary_op (lhs, ass);
4067 break;
4068 case VN_REFERENCE:
4069 changed = visit_reference_op_load (lhs, rhs1, ass);
4070 break;
4071 default:
4072 changed = defs_to_varying (ass);
4073 break;
4077 else
4078 changed = defs_to_varying (ass);
4080 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4082 tree lhs = gimple_call_lhs (call_stmt);
4083 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4085 /* Try constant folding based on our current lattice. */
4086 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4087 vn_valueize);
4088 if (simplified)
4090 if (dump_file && (dump_flags & TDF_DETAILS))
4092 fprintf (dump_file, "call ");
4093 print_gimple_expr (dump_file, call_stmt, 0);
4094 fprintf (dump_file, " simplified to ");
4095 print_generic_expr (dump_file, simplified);
4096 fprintf (dump_file, "\n");
4099 /* Setting value numbers to constants will occasionally
4100 screw up phi congruence because constants are not
4101 uniquely associated with a single ssa name that can be
4102 looked up. */
4103 if (simplified
4104 && is_gimple_min_invariant (simplified))
4106 changed = set_ssa_val_to (lhs, simplified);
4107 if (gimple_vdef (call_stmt))
4108 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4109 SSA_VAL (gimple_vuse (call_stmt)));
4110 goto done;
4112 else if (simplified
4113 && TREE_CODE (simplified) == SSA_NAME)
4115 changed = visit_copy (lhs, simplified);
4116 if (gimple_vdef (call_stmt))
4117 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4118 SSA_VAL (gimple_vuse (call_stmt)));
4119 goto done;
4121 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4123 changed = defs_to_varying (call_stmt);
4124 goto done;
4128 /* Pick up flags from a devirtualization target. */
4129 tree fn = gimple_call_fn (stmt);
4130 int extra_fnflags = 0;
4131 if (fn && TREE_CODE (fn) == SSA_NAME)
4133 fn = SSA_VAL (fn);
4134 if (TREE_CODE (fn) == ADDR_EXPR
4135 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4136 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4138 if (!gimple_call_internal_p (call_stmt)
4139 && (/* Calls to the same function with the same vuse
4140 and the same operands do not necessarily return the same
4141 value, unless they're pure or const. */
4142 ((gimple_call_flags (call_stmt) | extra_fnflags)
4143 & (ECF_PURE | ECF_CONST))
4144 /* If calls have a vdef, subsequent calls won't have
4145 the same incoming vuse. So, if 2 calls with vdef have the
4146 same vuse, we know they're not subsequent.
4147 We can value number 2 calls to the same function with the
4148 same vuse and the same operands the same even though they
4149 are not subsequent, because there is no code in the program
4150 that can compare the 2 values... */
4151 || (gimple_vdef (call_stmt)
4152 /* ... unless the call returns a pointer which does
4153 not alias with anything else. In which case the
4154 information that the values are distinct is encoded
4155 in the IL. */
4156 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4157 /* Only perform the following when being called from PRE
4158 which embeds tail merging. */
4159 && default_vn_walk_kind == VN_WALK)))
4160 changed = visit_reference_op_call (lhs, call_stmt);
4161 else
4162 changed = defs_to_varying (call_stmt);
4164 else
4165 changed = defs_to_varying (stmt);
4166 done:
4167 return changed;
4170 /* Compare two operands by reverse postorder index. */
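/* The ordering implemented below: names without a defining statement
   (GIMPLE_NOPs, i.e. default defs) sort first, by SSA version; then
   names whose definition has no basic block; otherwise blocks are
   ordered by RPO number, and within one block PHIs come before other
   statements, which are ordered by statement UID with the SSA version
   as the final tie breaker.  */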
4172 static int
4173 compare_ops (const void *pa, const void *pb)
4175 const tree opa = *((const tree *)pa);
4176 const tree opb = *((const tree *)pb);
4177 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4178 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4179 basic_block bba;
4180 basic_block bbb;
4182 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4183 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4184 else if (gimple_nop_p (opstmta))
4185 return -1;
4186 else if (gimple_nop_p (opstmtb))
4187 return 1;
4189 bba = gimple_bb (opstmta);
4190 bbb = gimple_bb (opstmtb);
4192 if (!bba && !bbb)
4193 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4194 else if (!bba)
4195 return -1;
4196 else if (!bbb)
4197 return 1;
4199 if (bba == bbb)
4201 if (gimple_code (opstmta) == GIMPLE_PHI
4202 && gimple_code (opstmtb) == GIMPLE_PHI)
4203 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4204 else if (gimple_code (opstmta) == GIMPLE_PHI)
4205 return -1;
4206 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4207 return 1;
4208 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4209 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4210 else
4211 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4213 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4216 /* Sort an array containing members of a strongly connected component
4217 SCC so that the members are ordered by RPO number.
4218 This means that when the sort is complete, iterating through the
4219 array will give you the members in RPO order. */
4221 static void
4222 sort_scc (vec<tree> scc)
4224 scc.qsort (compare_ops);
4227 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
4229 static void
4230 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4232 size_t size = sizeof_vn_nary_op (onary->length);
4233 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4234 &info->nary_obstack);
4235 memcpy (nary, onary, size);
4236 vn_nary_op_insert_into (nary, info->nary, false);
4239 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
4241 static void
4242 copy_phi (vn_phi_t ophi, vn_tables_t info)
4244 vn_phi_t phi = info->phis_pool->allocate ();
4245 vn_phi_s **slot;
4246 memcpy (phi, ophi, sizeof (*phi));
4247 ophi->phiargs.create (0);
4248 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4249 gcc_assert (!*slot);
4250 *slot = phi;
4253 /* Insert the no longer used reference OREF into the hash tables of INFO. */
4255 static void
4256 copy_reference (vn_reference_t oref, vn_tables_t info)
4258 vn_reference_t ref;
4259 vn_reference_s **slot;
4260 ref = info->references_pool->allocate ();
4261 memcpy (ref, oref, sizeof (*ref));
4262 oref->operands.create (0);
4263 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4264 if (*slot)
4265 free_reference (*slot);
4266 *slot = ref;
4269 /* Process a strongly connected component in the SSA graph. */
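/* Single-member SCCs that are not PHIs are simply visited once.  Larger
   SCCs (and self-referential PHIs) are iterated with the optimistic
   tables, which are cleared before every iteration, until no value
   number changes; the surviving entries are then copied into the valid
   tables via copy_nary/copy_phi/copy_reference.  */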
4271 static void
4272 process_scc (vec<tree> scc)
4274 tree var;
4275 unsigned int i;
4276 unsigned int iterations = 0;
4277 bool changed = true;
4278 vn_nary_op_iterator_type hin;
4279 vn_phi_iterator_type hip;
4280 vn_reference_iterator_type hir;
4281 vn_nary_op_t nary;
4282 vn_phi_t phi;
4283 vn_reference_t ref;
4285 /* If the SCC has a single member, just visit it. */
4286 if (scc.length () == 1)
4288 tree use = scc[0];
4289 if (VN_INFO (use)->use_processed)
4290 return;
4291 /* We need to make sure it doesn't form a cycle itself, which can
4292 happen for self-referential PHI nodes. In that case we would
4293 end up inserting an expression with VN_TOP operands into the
4294 valid table which makes us derive bogus equivalences later.
4295 The cheapest way to check this is to assume it for all PHI nodes. */
4296 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4297 /* Fallthru to iteration. */ ;
4298 else
4300 visit_use (use);
4301 return;
4305 if (dump_file && (dump_flags & TDF_DETAILS))
4306 print_scc (dump_file, scc);
4308 /* Iterate over the SCC with the optimistic table until it stops
4309 changing. */
4310 current_info = optimistic_info;
4311 while (changed)
4313 changed = false;
4314 iterations++;
4315 if (dump_file && (dump_flags & TDF_DETAILS))
4316 fprintf (dump_file, "Starting iteration %d\n", iterations);
4317 /* As we are value-numbering optimistically we have to
4318 clear the expression tables and the simplified expressions
4319 in each iteration until we converge. */
4320 optimistic_info->nary->empty ();
4321 optimistic_info->phis->empty ();
4322 optimistic_info->references->empty ();
4323 obstack_free (&optimistic_info->nary_obstack, NULL);
4324 gcc_obstack_init (&optimistic_info->nary_obstack);
4325 optimistic_info->phis_pool->release ();
4326 optimistic_info->references_pool->release ();
4327 FOR_EACH_VEC_ELT (scc, i, var)
4328 gcc_assert (!VN_INFO (var)->needs_insertion
4329 && VN_INFO (var)->expr == NULL);
4330 FOR_EACH_VEC_ELT (scc, i, var)
4331 changed |= visit_use (var);
4334 if (dump_file && (dump_flags & TDF_DETAILS))
4335 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4336 statistics_histogram_event (cfun, "SCC iterations", iterations);
4338 /* Finally, copy the contents of the no longer used optimistic
4339 table to the valid table. */
4340 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4341 copy_nary (nary, valid_info);
4342 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4343 copy_phi (phi, valid_info);
4344 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4345 ref, vn_reference_t, hir)
4346 copy_reference (ref, valid_info);
4348 current_info = valid_info;
4352 /* Pop the components of the found SCC for NAME off the SCC stack
4353 and process them. If the SCC exceeds the size limit we give up
4354 and drop all its members to varying instead. */
4356 static void
4357 extract_and_process_scc_for_name (tree name)
4359 auto_vec<tree> scc;
4360 tree x;
4362 /* Found an SCC, pop the components off the SCC stack and
4363 process them. */
4366 x = sccstack.pop ();
4368 VN_INFO (x)->on_sccstack = false;
4369 scc.safe_push (x);
4370 } while (x != name);
4372 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4373 incredibly large.
4374 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4375 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4377 if (dump_file)
4379 print_scc (dump_file, scc);
4380 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4381 "size %u exceeding %u\n", scc.length (),
4382 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4384 tree var;
4385 unsigned i;
4386 FOR_EACH_VEC_ELT (scc, i, var)
4388 gimple *def = SSA_NAME_DEF_STMT (var);
4389 mark_use_processed (var);
4390 if (SSA_NAME_IS_DEFAULT_DEF (var)
4391 || gimple_code (def) == GIMPLE_PHI)
4392 set_ssa_val_to (var, var);
4393 else
4394 defs_to_varying (def);
4396 return;
4399 if (scc.length () > 1)
4400 sort_scc (scc);
4402 process_scc (scc);
4405 /* Depth first search on NAME to discover and process SCC's in the SSA
4406 graph.
4407 Execution of this algorithm relies on the fact that the SCC's are
4408 popped off the stack in topological order. */
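/* This is an iterative formulation of Tarjan's SCC algorithm: dfsnum
   and low play the role of the DFS index and low-link, and instead of
   recursing, the current name and use iterator are pushed onto the
   explicit namevec/itervec stacks, which keeps very deep SSA graphs
   from overflowing the call stack.  */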
4412 static void
4413 DFS (tree name)
4415 auto_vec<ssa_op_iter> itervec;
4416 auto_vec<tree> namevec;
4417 use_operand_p usep = NULL;
4418 gimple *defstmt;
4419 tree use;
4420 ssa_op_iter iter;
4422 start_over:
4423 /* SCC info */
4424 VN_INFO (name)->dfsnum = next_dfs_num++;
4425 VN_INFO (name)->visited = true;
4426 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4428 sccstack.safe_push (name);
4429 VN_INFO (name)->on_sccstack = true;
4430 defstmt = SSA_NAME_DEF_STMT (name);
4432 /* Recursively DFS on our operands, looking for SCC's. */
4433 if (!gimple_nop_p (defstmt))
4435 /* Push a new iterator. */
4436 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4437 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4438 else
4439 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4441 else
4442 clear_and_done_ssa_iter (&iter);
4444 while (1)
4446 /* If we are done processing uses of a name, go up the stack
4447 of iterators and process SCCs as we found them. */
4448 if (op_iter_done (&iter))
4450 /* See if we found an SCC. */
4451 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4452 extract_and_process_scc_for_name (name);
4454 /* Check if we are done. */
4455 if (namevec.is_empty ())
4456 return;
4458 /* Restore the last use walker and continue walking there. */
4459 use = name;
4460 name = namevec.pop ();
4461 memcpy (&iter, &itervec.last (),
4462 sizeof (ssa_op_iter));
4463 itervec.pop ();
4464 goto continue_walking;
4467 use = USE_FROM_PTR (usep);
4469 /* Since we handle phi nodes, we will sometimes get
4470 invariants in the use expression. */
4471 if (TREE_CODE (use) == SSA_NAME)
4473 if (! (VN_INFO (use)->visited))
4475 /* Recurse by pushing the current use walking state on
4476 the stack and starting over. */
4477 itervec.safe_push (iter);
4478 namevec.safe_push (name);
4479 name = use;
4480 goto start_over;
4482 continue_walking:
4483 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4484 VN_INFO (use)->low);
4486 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4487 && VN_INFO (use)->on_sccstack)
4489 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4490 VN_INFO (name)->low);
4494 usep = op_iter_next_use (&iter);
4498 /* Allocate a value number table. */
4500 static void
4501 allocate_vn_table (vn_tables_t table)
4503 table->phis = new vn_phi_table_type (23);
4504 table->nary = new vn_nary_op_table_type (23);
4505 table->references = new vn_reference_table_type (23);
4507 gcc_obstack_init (&table->nary_obstack);
4508 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4509 table->references_pool = new object_allocator<vn_reference_s>
4510 ("VN references");
4513 /* Free a value number table. */
4515 static void
4516 free_vn_table (vn_tables_t table)
4518 delete table->phis;
4519 table->phis = NULL;
4520 delete table->nary;
4521 table->nary = NULL;
4522 delete table->references;
4523 table->references = NULL;
4524 obstack_free (&table->nary_obstack, NULL);
4525 delete table->phis_pool;
4526 delete table->references_pool;
4529 static void
4530 init_scc_vn (void)
4532 int j;
4533 int *rpo_numbers_temp;
4535 calculate_dominance_info (CDI_DOMINATORS);
4536 mark_dfs_back_edges ();
4538 sccstack.create (0);
4539 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4541 constant_value_ids = BITMAP_ALLOC (NULL);
4543 next_dfs_num = 1;
4544 next_value_id = 1;
4546 vn_ssa_aux_table.create (num_ssa_names + 1);
4547 /* The create above doesn't actually grow the vector to the requested
4548 size, it just preallocates the space, so grow it explicitly. */
4549 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4550 gcc_obstack_init (&vn_ssa_aux_obstack);
4552 shared_lookup_phiargs.create (0);
4553 shared_lookup_references.create (0);
4554 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4555 rpo_numbers_temp =
4556 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4557 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4559 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb index
4560 means that the i'th block in RPO order is bb. We want to map block
4561 indices to RPO numbers, so we need to invert this array. */
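/* For instance, if basic block 7 is the third block in RPO order
   (rpo_numbers_temp[2] == 7), the loop below sets rpo_numbers[7] to 2.  */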
4562 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4563 rpo_numbers[rpo_numbers_temp[j]] = j;
4565 XDELETE (rpo_numbers_temp);
4567 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4568 get_identifier ("VN_TOP"), error_mark_node);
4570 renumber_gimple_stmt_uids ();
4572 /* Create the valid and optimistic value numbering tables. */
4573 valid_info = XCNEW (struct vn_tables_s);
4574 allocate_vn_table (valid_info);
4575 optimistic_info = XCNEW (struct vn_tables_s);
4576 allocate_vn_table (optimistic_info);
4577 current_info = valid_info;
4579 /* Create the VN_INFO structures, and initialize value numbers to
4580 TOP or VARYING for parameters. */
4581 size_t i;
4582 tree name;
4584 FOR_EACH_SSA_NAME (i, name, cfun)
4586 VN_INFO_GET (name)->valnum = VN_TOP;
4587 VN_INFO (name)->needs_insertion = false;
4588 VN_INFO (name)->expr = NULL;
4589 VN_INFO (name)->value_id = 0;
4591 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4592 continue;
4594 switch (TREE_CODE (SSA_NAME_VAR (name)))
4596 case VAR_DECL:
4597 /* All undefined vars are VARYING. */
4598 VN_INFO (name)->valnum = name;
4599 VN_INFO (name)->visited = true;
4600 break;
4602 case PARM_DECL:
4603 /* Parameters are VARYING but we can record a condition
4604 if we know it is a non-NULL pointer. */
4605 VN_INFO (name)->visited = true;
4606 VN_INFO (name)->valnum = name;
4607 if (POINTER_TYPE_P (TREE_TYPE (name))
4608 && nonnull_arg_p (SSA_NAME_VAR (name)))
4610 tree ops[2];
4611 ops[0] = name;
4612 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4613 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4614 boolean_true_node, 0);
4615 if (dump_file && (dump_flags & TDF_DETAILS))
4617 fprintf (dump_file, "Recording ");
4618 print_generic_expr (dump_file, name, TDF_SLIM);
4619 fprintf (dump_file, " != 0\n");
4622 break;
4624 case RESULT_DECL:
4625 /* If the result is passed by invisible reference the default
4626 def is initialized, otherwise it's uninitialized; either way it
4627 is treated as varying. */
4628 VN_INFO (name)->visited = true;
4629 VN_INFO (name)->valnum = name;
4630 break;
4632 default:
4633 gcc_unreachable ();
4638 /* Restore SSA info that has been reset on value leaders. */
4640 void
4641 scc_vn_restore_ssa_info (void)
4643 unsigned i;
4644 tree name;
4646 FOR_EACH_SSA_NAME (i, name, cfun)
4648 if (has_VN_INFO (name))
4650 if (VN_INFO (name)->needs_insertion)
4652 else if (POINTER_TYPE_P (TREE_TYPE (name))
4653 && VN_INFO (name)->info.ptr_info)
4654 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4655 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4656 && VN_INFO (name)->info.range_info)
4658 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4659 SSA_NAME_ANTI_RANGE_P (name)
4660 = VN_INFO (name)->range_info_anti_range_p;
4666 void
4667 free_scc_vn (void)
4669 size_t i;
4670 tree name;
4672 delete constant_to_value_id;
4673 constant_to_value_id = NULL;
4674 BITMAP_FREE (constant_value_ids);
4675 shared_lookup_phiargs.release ();
4676 shared_lookup_references.release ();
4677 XDELETEVEC (rpo_numbers);
4679 FOR_EACH_SSA_NAME (i, name, cfun)
4681 if (has_VN_INFO (name)
4682 && VN_INFO (name)->needs_insertion)
4683 release_ssa_name (name);
4685 obstack_free (&vn_ssa_aux_obstack, NULL);
4686 vn_ssa_aux_table.release ();
4688 sccstack.release ();
4689 free_vn_table (valid_info);
4690 XDELETE (valid_info);
4691 free_vn_table (optimistic_info);
4692 XDELETE (optimistic_info);
4694 BITMAP_FREE (const_parms);
4697 /* Set *ID according to RESULT. */
4699 static void
4700 set_value_id_for_result (tree result, unsigned int *id)
4702 if (result && TREE_CODE (result) == SSA_NAME)
4703 *id = VN_INFO (result)->value_id;
4704 else if (result && is_gimple_min_invariant (result))
4705 *id = get_or_alloc_constant_value_id (result);
4706 else
4707 *id = get_next_value_id ();
4710 /* Set the value ids in the valid hash tables. */
4712 static void
4713 set_hashtable_value_ids (void)
4715 vn_nary_op_iterator_type hin;
4716 vn_phi_iterator_type hip;
4717 vn_reference_iterator_type hir;
4718 vn_nary_op_t vno;
4719 vn_reference_t vr;
4720 vn_phi_t vp;
4722 /* Now set the value ids of the things we had put in the hash
4723 table. */
4725 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4726 set_value_id_for_result (vno->result, &vno->value_id);
4728 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4729 set_value_id_for_result (vp->result, &vp->value_id);
4731 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4732 hir)
4733 set_value_id_for_result (vr->result, &vr->value_id);
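/* Dominator walker driving the value numbering: it DFSes from each
   not-yet-visited SSA def in dominator order, temporarily records
   conditions implied by a single controlling predecessor edge on
   cond_stack (popped again in after_dom_children), and returns the
   single taken outgoing edge when the block's final control statement
   can be resolved, so the remaining outgoing edges can be marked not
   executable.  */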
4736 class sccvn_dom_walker : public dom_walker
4738 public:
4739 sccvn_dom_walker ()
4740 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4742 virtual edge before_dom_children (basic_block);
4743 virtual void after_dom_children (basic_block);
4745 void record_cond (basic_block,
4746 enum tree_code code, tree lhs, tree rhs, bool value);
4747 void record_conds (basic_block,
4748 enum tree_code code, tree lhs, tree rhs, bool value);
4750 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4751 cond_stack;
4754 /* Record a temporary condition for the BB and its dominated blocks. */
4756 void
4757 sccvn_dom_walker::record_cond (basic_block bb,
4758 enum tree_code code, tree lhs, tree rhs,
4759 bool value)
4761 tree ops[2] = { lhs, rhs };
4762 vn_nary_op_t old = NULL;
4763 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4764 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4765 vn_nary_op_t cond
4766 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4767 value
4768 ? boolean_true_node
4769 : boolean_false_node, 0);
4770 if (dump_file && (dump_flags & TDF_DETAILS))
4772 fprintf (dump_file, "Recording temporarily ");
4773 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4774 fprintf (dump_file, " %s ", get_tree_code_name (code));
4775 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4776 fprintf (dump_file, " == %s%s\n",
4777 value ? "true" : "false",
4778 old ? " (old entry saved)" : "");
4780 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4783 /* Record temporary conditions for the BB and its dominated blocks
4784 according to LHS CODE RHS == VALUE and its dominated conditions. */
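/* For example (with hypothetical names a_1, b_2): when a_1 < b_2 is
   recorded as true, the dominated conditions a_1 <= b_2 and a_1 != b_2
   are recorded as true and a_1 == b_2 as false; when a_1 == b_2 is
   true, <= and >= become true while < and > become false.  */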
4786 void
4787 sccvn_dom_walker::record_conds (basic_block bb,
4788 enum tree_code code, tree lhs, tree rhs,
4789 bool value)
4791 /* Record the original condition. */
4792 record_cond (bb, code, lhs, rhs, value);
4794 if (!value)
4795 return;
4797 /* Record dominated conditions if the condition is true. Note that
4798 the inversion is already recorded. */
4799 switch (code)
4801 case LT_EXPR:
4802 case GT_EXPR:
4803 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4804 record_cond (bb, NE_EXPR, lhs, rhs, true);
4805 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4806 break;
4808 case EQ_EXPR:
4809 record_cond (bb, LE_EXPR, lhs, rhs, true);
4810 record_cond (bb, GE_EXPR, lhs, rhs, true);
4811 record_cond (bb, LT_EXPR, lhs, rhs, false);
4812 record_cond (bb, GT_EXPR, lhs, rhs, false);
4813 break;
4815 default:
4816 break;
4820 /* Restore expressions and values derived from conditionals. */
4822 void
4823 sccvn_dom_walker::after_dom_children (basic_block bb)
4825 while (!cond_stack.is_empty ()
4826 && cond_stack.last ().first == bb)
4828 vn_nary_op_t cond = cond_stack.last ().second.first;
4829 vn_nary_op_t old = cond_stack.last ().second.second;
4830 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4831 if (old)
4832 vn_nary_op_insert_into (old, current_info->nary, false);
4833 cond_stack.pop ();
4837 /* Value number all statements in BB. */
4839 edge
4840 sccvn_dom_walker::before_dom_children (basic_block bb)
4842 edge e;
4843 edge_iterator ei;
4845 if (dump_file && (dump_flags & TDF_DETAILS))
4846 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4848 /* If we have a single predecessor record the equivalence from a
4849 possible condition on the predecessor edge. */
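/* E.g. if the only non-backedge predecessor is the true edge of a
   block ending in if (x_1 != 0) (hypothetical condition), then
   x_1 != 0 == true and its derived conditions hold throughout BB and
   the blocks it dominates, so they are temporarily entered into the
   nary table and removed again in after_dom_children.  */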
4850 edge pred_e = NULL;
4851 FOR_EACH_EDGE (e, ei, bb->preds)
4853 /* Ignore simple backedges from this to allow recording conditions
4854 in loop headers. */
4855 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4856 continue;
4857 if (! pred_e)
4858 pred_e = e;
4859 else
4861 pred_e = NULL;
4862 break;
4865 if (pred_e)
4867 /* Check if there are multiple executable successor edges in
4868 the source block. Otherwise there is no additional info
4869 to be recorded. */
4870 edge e2;
4871 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4872 if (e2 != pred_e
4873 && e2->flags & EDGE_EXECUTABLE)
4874 break;
4875 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4877 gimple *stmt = last_stmt (pred_e->src);
4878 if (stmt
4879 && gimple_code (stmt) == GIMPLE_COND)
4881 enum tree_code code = gimple_cond_code (stmt);
4882 tree lhs = gimple_cond_lhs (stmt);
4883 tree rhs = gimple_cond_rhs (stmt);
4884 record_conds (bb, code, lhs, rhs,
4885 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4886 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4887 if (code != ERROR_MARK)
4888 record_conds (bb, code, lhs, rhs,
4889 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4894 /* Value-number all defs in the basic-block. */
4895 for (gphi_iterator gsi = gsi_start_phis (bb);
4896 !gsi_end_p (gsi); gsi_next (&gsi))
4898 gphi *phi = gsi.phi ();
4899 tree res = PHI_RESULT (phi);
4900 if (!VN_INFO (res)->visited)
4901 DFS (res);
4903 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4904 !gsi_end_p (gsi); gsi_next (&gsi))
4906 ssa_op_iter i;
4907 tree op;
4908 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4909 if (!VN_INFO (op)->visited)
4910 DFS (op);
4913 /* Finally look at the last stmt. */
4914 gimple *stmt = last_stmt (bb);
4915 if (!stmt)
4916 return NULL;
4918 enum gimple_code code = gimple_code (stmt);
4919 if (code != GIMPLE_COND
4920 && code != GIMPLE_SWITCH
4921 && code != GIMPLE_GOTO)
4922 return NULL;
4924 if (dump_file && (dump_flags & TDF_DETAILS))
4926 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4927 print_gimple_stmt (dump_file, stmt, 0);
4930 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4931 if value-numbering can prove they are not reachable. Handling
4932 computed gotos is also possible. */
4933 tree val;
4934 switch (code)
4936 case GIMPLE_COND:
4938 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4939 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4940 val = gimple_simplify (gimple_cond_code (stmt),
4941 boolean_type_node, lhs, rhs,
4942 NULL, vn_valueize);
4943 /* If that didn't simplify to a constant see if we have recorded
4944 temporary expressions from taken edges. */
4945 if (!val || TREE_CODE (val) != INTEGER_CST)
4947 tree ops[2];
4948 ops[0] = lhs;
4949 ops[1] = rhs;
4950 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4951 boolean_type_node, ops, NULL);
4953 break;
4955 case GIMPLE_SWITCH:
4956 val = gimple_switch_index (as_a <gswitch *> (stmt));
4957 break;
4958 case GIMPLE_GOTO:
4959 val = gimple_goto_dest (stmt);
4960 break;
4961 default:
4962 gcc_unreachable ();
4964 if (!val)
4965 return NULL;
4967 edge taken = find_taken_edge (bb, vn_valueize (val));
4968 if (!taken)
4969 return NULL;
4971 if (dump_file && (dump_flags & TDF_DETAILS))
4972 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4973 "not executable\n", bb->index, bb->index, taken->dest->index);
4975 return taken;
4978 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4979 oracle walking during the VN process. */
4982 void
4983 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4985 size_t i;
4987 default_vn_walk_kind = default_vn_walk_kind_;
4989 init_scc_vn ();
4991 /* Collect pointers we know point to readonly memory. */
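/* As far as I can tell, character I of the internal "fn spec" string
   describes the I'th parameter (character 0 describes the return
   value), and 'R'/'r' mark pointer arguments whose pointed-to memory
   is not written through them, so their default defs can be recorded
   as pointing to readonly memory.  */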
4992 const_parms = BITMAP_ALLOC (NULL);
4993 tree fnspec = lookup_attribute ("fn spec",
4994 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4995 if (fnspec)
4997 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4998 i = 1;
4999 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5000 arg; arg = DECL_CHAIN (arg), ++i)
5002 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5003 break;
5004 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5005 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5007 tree name = ssa_default_def (cfun, arg);
5008 if (name)
5009 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5014 /* Walk all blocks in dominator order, value-numbering stmts
5015 SSA defs and decide whether outgoing edges are not executable. */
5016 sccvn_dom_walker walker;
5017 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5019 /* Initialize the value ids and prune out remaining VN_TOPs
5020 from dead code. */
5021 tree name;
5022 FOR_EACH_SSA_NAME (i, name, cfun)
5024 vn_ssa_aux_t info = VN_INFO (name);
5025 if (!info->visited
5026 || info->valnum == VN_TOP)
5027 info->valnum = name;
5028 if (info->valnum == name)
5029 info->value_id = get_next_value_id ();
5030 else if (is_gimple_min_invariant (info->valnum))
5031 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5034 /* Propagate value ids from value leaders to the names valued the same. */
5035 FOR_EACH_SSA_NAME (i, name, cfun)
5037 vn_ssa_aux_t info = VN_INFO (name);
5038 if (TREE_CODE (info->valnum) == SSA_NAME
5039 && info->valnum != name
5040 && info->value_id != VN_INFO (info->valnum)->value_id)
5041 info->value_id = VN_INFO (info->valnum)->value_id;
5044 set_hashtable_value_ids ();
5046 if (dump_file && (dump_flags & TDF_DETAILS))
5048 fprintf (dump_file, "Value numbers:\n");
5049 FOR_EACH_SSA_NAME (i, name, cfun)
5051 if (VN_INFO (name)->visited
5052 && SSA_VAL (name) != name)
5054 print_generic_expr (dump_file, name);
5055 fprintf (dump_file, " = ");
5056 print_generic_expr (dump_file, SSA_VAL (name));
5057 fprintf (dump_file, "\n");
5063 /* Return the maximum value id we have ever seen. */
5065 unsigned int
5066 get_max_value_id (void)
5068 return next_value_id;
5071 /* Return the next unique value id. */
5073 unsigned int
5074 get_next_value_id (void)
5076 return next_value_id++;
5080 /* Compare two expressions E1 and E2 and return true if they are equal. */
5082 bool
5083 expressions_equal_p (tree e1, tree e2)
5085 /* The obvious case. */
5086 if (e1 == e2)
5087 return true;
5089 /* If either one is VN_TOP consider them equal. */
5090 if (e1 == VN_TOP || e2 == VN_TOP)
5091 return true;
5093 /* If only one of them is null, they cannot be equal. */
5094 if (!e1 || !e2)
5095 return false;
5097 /* Now perform the actual comparison. */
5098 if (TREE_CODE (e1) == TREE_CODE (e2)
5099 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5100 return true;
5102 return false;
5106 /* Return true if the nary operation NARY may trap. This is a copy
5107 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5109 bool
5110 vn_nary_may_trap (vn_nary_op_t nary)
5112 tree type;
5113 tree rhs2 = NULL_TREE;
5114 bool honor_nans = false;
5115 bool honor_snans = false;
5116 bool fp_operation = false;
5117 bool honor_trapv = false;
5118 bool handled, ret;
5119 unsigned i;
5121 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5122 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5123 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5125 type = nary->type;
5126 fp_operation = FLOAT_TYPE_P (type);
5127 if (fp_operation)
5129 honor_nans = flag_trapping_math && !flag_finite_math_only;
5130 honor_snans = flag_signaling_nans != 0;
5132 else if (INTEGRAL_TYPE_P (type)
5133 && TYPE_OVERFLOW_TRAPS (type))
5134 honor_trapv = true;
5136 if (nary->length >= 2)
5137 rhs2 = nary->op[1];
5138 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5139 honor_trapv,
5140 honor_nans, honor_snans, rhs2,
5141 &handled);
5142 if (handled
5143 && ret)
5144 return true;
5146 for (i = 0; i < nary->length; ++i)
5147 if (tree_could_trap_p (nary->op[i]))
5148 return true;
5150 return false;