gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
63 #include "stringpool.h"
64 #include "attribs.h"
66 /* This algorithm is based on the SCC algorithm presented by Keith
67 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
68 (http://citeseer.ist.psu.edu/41805.html). In
69 straight line code, it is equivalent to a regular hash based value
70 numbering that is performed in reverse postorder.
72 For code with cycles, there are two alternatives, both of which
73 require keeping the hashtables separate from the actual list of
74 value numbers for SSA names.
76 1. Iterate value numbering in an RPO walk of the blocks, removing
77 all the entries from the hashtable after each iteration (but
78 keeping the SSA name->value number mapping between iterations).
79 Iterate until it does not change.
81 2. Perform value numbering as part of an SCC walk on the SSA graph,
82 iterating only the cycles in the SSA graph until they do not change
83 (using a separate, optimistic hashtable for value numbering the SCC
84 operands).
86 The second is not just faster in practice (because most SSA graph
87 cycles do not involve all the variables in the graph), it also has
88 some nice properties.
90 One of these nice properties is that when we pop an SCC off the
91 stack, we are guaranteed to have processed all the operands coming from
92 *outside of that SCC*, so we do not need to do anything special to
93 ensure they have value numbers.
95 Another nice property is that the SCC walk is done as part of a DFS
96 of the SSA graph, which makes it easy to perform combining and
97 simplifying operations at the same time.
99 The code below is deliberately written in a way that makes it easy
100 to separate the SCC walk from the other work it does.
102 In order to propagate constants through the code, we track which
103 expressions contain constants, and use those while folding. In
104 theory, we could also track expressions whose value numbers are
105 replaced, in case we end up folding based on expression
106 identities.
108 In order to value number memory, we assign value numbers to vuses.
109 This enables us to note that, for example, stores to the same
110 address of the same value from the same starting memory states are
111 equivalent.
112 TODO:
 114 1. We can iterate only the changing portions of the SCCs, but
115 I have not seen an SCC big enough for this to be a win.
116 2. If you differentiate between phi nodes for loops and phi nodes
117 for if-then-else, you can properly consider phi nodes in different
118 blocks for equivalence.
119 3. We could value number vuses in more cases, particularly, whole
120 structure copies.
124 static tree *last_vuse_ptr;
125 static vn_lookup_kind vn_walk_kind;
126 static vn_lookup_kind default_vn_walk_kind;
127 bitmap const_parms;
129 /* vn_nary_op hashtable helpers. */
131 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
133 typedef vn_nary_op_s *compare_type;
134 static inline hashval_t hash (const vn_nary_op_s *);
135 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
138 /* Return the computed hashcode for nary operation P1. */
140 inline hashval_t
141 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
143 return vno1->hashcode;
146 /* Compare nary operations P1 and P2 and return true if they are
147 equivalent. */
149 inline bool
150 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
152 return vn_nary_op_eq (vno1, vno2);
155 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
156 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
159 /* vn_phi hashtable helpers. */
161 static int
162 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
164 struct vn_phi_hasher : pointer_hash <vn_phi_s>
166 static inline hashval_t hash (const vn_phi_s *);
167 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
168 static inline void remove (vn_phi_s *);
171 /* Return the computed hashcode for phi operation P1. */
173 inline hashval_t
174 vn_phi_hasher::hash (const vn_phi_s *vp1)
176 return vp1->hashcode;
179 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
181 inline bool
182 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
184 return vn_phi_eq (vp1, vp2);
 187 /* Free a phi operation structure PHI. */
189 inline void
190 vn_phi_hasher::remove (vn_phi_s *phi)
192 phi->phiargs.release ();
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
 209 /* We do not care about differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
 219 /* Free a reference operation structure VR. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : pointer_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 static inline void remove (vn_reference_s *);
237 /* Return the hashcode for a given reference operation P1. */
239 inline hashval_t
240 vn_reference_hasher::hash (const vn_reference_s *vr1)
242 return vr1->hashcode;
245 inline bool
246 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
248 return vn_reference_eq (v, c);
251 inline void
252 vn_reference_hasher::remove (vn_reference_s *v)
254 free_reference (v);
257 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
258 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
261 /* The set of hashtables and alloc_pool's for their items. */
263 typedef struct vn_tables_s
265 vn_nary_op_table_type *nary;
266 vn_phi_table_type *phis;
267 vn_reference_table_type *references;
268 struct obstack nary_obstack;
269 object_allocator<vn_phi_s> *phis_pool;
270 object_allocator<vn_reference_s> *references_pool;
271 } *vn_tables_t;
274 /* vn_constant hashtable helpers. */
276 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
278 static inline hashval_t hash (const vn_constant_s *);
279 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
282 /* Hash table hash function for vn_constant_t. */
284 inline hashval_t
285 vn_constant_hasher::hash (const vn_constant_s *vc1)
287 return vc1->hashcode;
290 /* Hash table equality function for vn_constant_t. */
292 inline bool
293 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
295 if (vc1->hashcode != vc2->hashcode)
296 return false;
298 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
301 static hash_table<vn_constant_hasher> *constant_to_value_id;
302 static bitmap constant_value_ids;
305 /* Valid hashtables storing information we have proven to be
306 correct. */
308 static vn_tables_t valid_info;
310 /* Optimistic hashtables storing information we are making assumptions about
311 during iterations. */
313 static vn_tables_t optimistic_info;
315 /* Pointer to the set of hashtables that is currently being used.
316 Should always point to either the optimistic_info, or the
317 valid_info. */
319 static vn_tables_t current_info;
322 /* Reverse post order index for each basic block. */
324 static int *rpo_numbers;
326 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
328 /* Return the SSA value of the VUSE x, supporting released VDEFs
329 during elimination which will value-number the VDEF to the
330 associated VUSE (but not substitute in the whole lattice). */
332 static inline tree
333 vuse_ssa_val (tree x)
335 if (!x)
336 return NULL_TREE;
 338 do
 340 x = SSA_VAL (x);
342 while (SSA_NAME_IN_FREE_LIST (x));
344 return x;
347 /* This represents the top of the VN lattice, which is the universal
348 value. */
350 tree VN_TOP;
352 /* Unique counter for our value ids. */
354 static unsigned int next_value_id;
356 /* Next DFS number and the stack for strongly connected component
357 detection. */
359 static unsigned int next_dfs_num;
360 static vec<tree> sccstack;
364 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
365 are allocated on an obstack for locality reasons, and to free them
366 without looping over the vec. */
368 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
369 static struct obstack vn_ssa_aux_obstack;
371 /* Return whether there is value numbering information for a given SSA name. */
373 bool
374 has_VN_INFO (tree name)
376 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
377 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
378 return false;
381 /* Return the value numbering information for a given SSA name. */
383 vn_ssa_aux_t
384 VN_INFO (tree name)
386 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
387 gcc_checking_assert (res);
388 return res;
391 /* Set the value numbering info for a given SSA name to a given
392 value. */
394 static inline void
395 VN_INFO_SET (tree name, vn_ssa_aux_t value)
397 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
400 /* Initialize the value numbering info for a given SSA name.
401 This should be called just once for every SSA name. */
403 vn_ssa_aux_t
404 VN_INFO_GET (tree name)
406 vn_ssa_aux_t newinfo;
408 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
409 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
410 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
411 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
412 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
413 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
414 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
415 return newinfo;
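
/* Usage sketch (illustrative; the actual setup lives in init_scc_vn further
   down this file): when an SSA name is first encountered its aux info is
   created once and starts out optimistic, roughly

     vn_ssa_aux_t info = VN_INFO_GET (name);
     info->valnum = VN_TOP;

   and SSA_VAL (name) afterwards simply reads VN_INFO (name)->valnum.  */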
419 /* Return the vn_kind the expression computed by the stmt should be
420 associated with. */
422 enum vn_kind
423 vn_get_stmt_kind (gimple *stmt)
425 switch (gimple_code (stmt))
427 case GIMPLE_CALL:
428 return VN_REFERENCE;
429 case GIMPLE_PHI:
430 return VN_PHI;
431 case GIMPLE_ASSIGN:
433 enum tree_code code = gimple_assign_rhs_code (stmt);
434 tree rhs1 = gimple_assign_rhs1 (stmt);
435 switch (get_gimple_rhs_class (code))
437 case GIMPLE_UNARY_RHS:
438 case GIMPLE_BINARY_RHS:
439 case GIMPLE_TERNARY_RHS:
440 return VN_NARY;
441 case GIMPLE_SINGLE_RHS:
442 switch (TREE_CODE_CLASS (code))
444 case tcc_reference:
 445 /* VOP-less references can go through the unary case. */
446 if ((code == REALPART_EXPR
447 || code == IMAGPART_EXPR
448 || code == VIEW_CONVERT_EXPR
449 || code == BIT_FIELD_REF)
450 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
451 return VN_NARY;
453 /* Fallthrough. */
454 case tcc_declaration:
455 return VN_REFERENCE;
457 case tcc_constant:
458 return VN_CONSTANT;
460 default:
461 if (code == ADDR_EXPR)
462 return (is_gimple_min_invariant (rhs1)
463 ? VN_CONSTANT : VN_REFERENCE);
464 else if (code == CONSTRUCTOR)
465 return VN_NARY;
466 return VN_NONE;
468 default:
469 return VN_NONE;
472 default:
473 return VN_NONE;
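
/* Illustrative mapping (a sketch, not exhaustive):

     _1 = a_2 + b_3;              -> VN_NARY
     _4 = MEM[(int *)p_5];        -> VN_REFERENCE
     x_6 = PHI <x_7(2), x_8(3)>   -> VN_PHI
     _9 = foo (a_2);              -> VN_REFERENCE (calls use the reference
                                     machinery)
     i_10 = 42;                   -> VN_CONSTANT  */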
477 /* Lookup a value id for CONSTANT and return it. If it does not
478 exist returns 0. */
480 unsigned int
481 get_constant_value_id (tree constant)
483 vn_constant_s **slot;
484 struct vn_constant_s vc;
486 vc.hashcode = vn_hash_constant_with_type (constant);
487 vc.constant = constant;
488 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
489 if (slot)
490 return (*slot)->value_id;
491 return 0;
494 /* Lookup a value id for CONSTANT, and if it does not exist, create a
495 new one and return it. If it does exist, return it. */
497 unsigned int
498 get_or_alloc_constant_value_id (tree constant)
500 vn_constant_s **slot;
501 struct vn_constant_s vc;
502 vn_constant_t vcp;
504 vc.hashcode = vn_hash_constant_with_type (constant);
505 vc.constant = constant;
506 slot = constant_to_value_id->find_slot (&vc, INSERT);
507 if (*slot)
508 return (*slot)->value_id;
510 vcp = XNEW (struct vn_constant_s);
511 vcp->hashcode = vc.hashcode;
512 vcp->constant = constant;
513 vcp->value_id = get_next_value_id ();
514 *slot = vcp;
515 bitmap_set_bit (constant_value_ids, vcp->value_id);
516 return vcp->value_id;
519 /* Return true if V is a value id for a constant. */
521 bool
522 value_id_constant_p (unsigned int v)
524 return bitmap_bit_p (constant_value_ids, v);
527 /* Compute the hash for a reference operand VRO1. */
529 static void
530 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
532 hstate.add_int (vro1->opcode);
533 if (vro1->op0)
534 inchash::add_expr (vro1->op0, hstate);
535 if (vro1->op1)
536 inchash::add_expr (vro1->op1, hstate);
537 if (vro1->op2)
538 inchash::add_expr (vro1->op2, hstate);
541 /* Compute a hash for the reference operation VR1 and return it. */
543 static hashval_t
544 vn_reference_compute_hash (const vn_reference_t vr1)
546 inchash::hash hstate;
547 hashval_t result;
548 int i;
549 vn_reference_op_t vro;
550 HOST_WIDE_INT off = -1;
551 bool deref = false;
553 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
555 if (vro->opcode == MEM_REF)
556 deref = true;
557 else if (vro->opcode != ADDR_EXPR)
558 deref = false;
559 if (vro->off != -1)
561 if (off == -1)
562 off = 0;
563 off += vro->off;
565 else
567 if (off != -1
568 && off != 0)
569 hstate.add_int (off);
570 off = -1;
571 if (deref
572 && vro->opcode == ADDR_EXPR)
574 if (vro->op0)
576 tree op = TREE_OPERAND (vro->op0, 0);
577 hstate.add_int (TREE_CODE (op));
578 inchash::add_expr (op, hstate);
581 else
582 vn_reference_op_compute_hash (vro, hstate);
585 result = hstate.end ();
586 /* ??? We would ICE later if we hash instead of adding that in. */
587 if (vr1->vuse)
588 result += SSA_NAME_VERSION (vr1->vuse);
590 return result;
593 /* Return true if reference operations VR1 and VR2 are equivalent. This
594 means they have the same set of operands and vuses. */
596 bool
597 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
599 unsigned i, j;
601 /* Early out if this is not a hash collision. */
602 if (vr1->hashcode != vr2->hashcode)
603 return false;
605 /* The VOP needs to be the same. */
606 if (vr1->vuse != vr2->vuse)
607 return false;
609 /* If the operands are the same we are done. */
610 if (vr1->operands == vr2->operands)
611 return true;
613 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
614 return false;
616 if (INTEGRAL_TYPE_P (vr1->type)
617 && INTEGRAL_TYPE_P (vr2->type))
619 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
620 return false;
622 else if (INTEGRAL_TYPE_P (vr1->type)
623 && (TYPE_PRECISION (vr1->type)
624 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
625 return false;
626 else if (INTEGRAL_TYPE_P (vr2->type)
627 && (TYPE_PRECISION (vr2->type)
628 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
629 return false;
631 i = 0;
632 j = 0;
635 HOST_WIDE_INT off1 = 0, off2 = 0;
636 vn_reference_op_t vro1, vro2;
637 vn_reference_op_s tem1, tem2;
638 bool deref1 = false, deref2 = false;
639 for (; vr1->operands.iterate (i, &vro1); i++)
641 if (vro1->opcode == MEM_REF)
642 deref1 = true;
643 /* Do not look through a storage order barrier. */
644 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
645 return false;
646 if (vro1->off == -1)
647 break;
648 off1 += vro1->off;
650 for (; vr2->operands.iterate (j, &vro2); j++)
652 if (vro2->opcode == MEM_REF)
653 deref2 = true;
654 /* Do not look through a storage order barrier. */
655 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
656 return false;
657 if (vro2->off == -1)
658 break;
659 off2 += vro2->off;
661 if (off1 != off2)
662 return false;
663 if (deref1 && vro1->opcode == ADDR_EXPR)
665 memset (&tem1, 0, sizeof (tem1));
666 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
667 tem1.type = TREE_TYPE (tem1.op0);
668 tem1.opcode = TREE_CODE (tem1.op0);
669 vro1 = &tem1;
670 deref1 = false;
672 if (deref2 && vro2->opcode == ADDR_EXPR)
674 memset (&tem2, 0, sizeof (tem2));
675 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
676 tem2.type = TREE_TYPE (tem2.op0);
677 tem2.opcode = TREE_CODE (tem2.op0);
678 vro2 = &tem2;
679 deref2 = false;
681 if (deref1 != deref2)
682 return false;
683 if (!vn_reference_op_eq (vro1, vro2))
684 return false;
685 ++j;
686 ++i;
688 while (vr1->operands.length () != i
689 || vr2->operands.length () != j);
691 return true;
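
/* Example of the offset accumulation above (illustrative): one reference
   decomposed as { COMPONENT_REF .g (off 4), MEM_REF (off 0), ADDR_EXPR &s }
   and another decomposed as { MEM_REF (off 4), ADDR_EXPR &s } both sum to a
   byte offset of 4 before reaching the ADDR_EXPR, and the ADDR_EXPRs are
   then compared through the pointed-to decl, so s.g and MEM[(T *)&s + 4]
   can compare equal here (provided the earlier type and size checks pass).  */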
694 /* Copy the operations present in load/store REF into RESULT, a vector of
695 vn_reference_op_s's. */
697 static void
698 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
700 if (TREE_CODE (ref) == TARGET_MEM_REF)
702 vn_reference_op_s temp;
704 result->reserve (3);
706 memset (&temp, 0, sizeof (temp));
707 temp.type = TREE_TYPE (ref);
708 temp.opcode = TREE_CODE (ref);
709 temp.op0 = TMR_INDEX (ref);
710 temp.op1 = TMR_STEP (ref);
711 temp.op2 = TMR_OFFSET (ref);
712 temp.off = -1;
713 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
714 temp.base = MR_DEPENDENCE_BASE (ref);
715 result->quick_push (temp);
717 memset (&temp, 0, sizeof (temp));
718 temp.type = NULL_TREE;
719 temp.opcode = ERROR_MARK;
720 temp.op0 = TMR_INDEX2 (ref);
721 temp.off = -1;
722 result->quick_push (temp);
724 memset (&temp, 0, sizeof (temp));
725 temp.type = NULL_TREE;
726 temp.opcode = TREE_CODE (TMR_BASE (ref));
727 temp.op0 = TMR_BASE (ref);
728 temp.off = -1;
729 result->quick_push (temp);
730 return;
733 /* For non-calls, store the information that makes up the address. */
734 tree orig = ref;
735 while (ref)
737 vn_reference_op_s temp;
739 memset (&temp, 0, sizeof (temp));
740 temp.type = TREE_TYPE (ref);
741 temp.opcode = TREE_CODE (ref);
742 temp.off = -1;
744 switch (temp.opcode)
746 case MODIFY_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 break;
749 case WITH_SIZE_EXPR:
750 temp.op0 = TREE_OPERAND (ref, 1);
751 temp.off = 0;
752 break;
753 case MEM_REF:
754 /* The base address gets its own vn_reference_op_s structure. */
755 temp.op0 = TREE_OPERAND (ref, 1);
757 offset_int off = mem_ref_offset (ref);
758 if (wi::fits_shwi_p (off))
759 temp.off = off.to_shwi ();
761 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
762 temp.base = MR_DEPENDENCE_BASE (ref);
763 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
764 break;
765 case BIT_FIELD_REF:
766 /* Record bits, position and storage order. */
767 temp.op0 = TREE_OPERAND (ref, 1);
768 temp.op1 = TREE_OPERAND (ref, 2);
769 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
771 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
772 if (off % BITS_PER_UNIT == 0)
773 temp.off = off / BITS_PER_UNIT;
775 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
776 break;
777 case COMPONENT_REF:
 778 /* The field decl is enough to unambiguously specify the field;
 779 a matching type is not necessary, and a mismatching type
 780 is always a spurious difference. */
781 temp.type = NULL_TREE;
782 temp.op0 = TREE_OPERAND (ref, 1);
783 temp.op1 = TREE_OPERAND (ref, 2);
785 tree this_offset = component_ref_field_offset (ref);
786 if (this_offset
787 && TREE_CODE (this_offset) == INTEGER_CST)
789 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
790 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
792 offset_int off
793 = (wi::to_offset (this_offset)
794 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
795 if (wi::fits_shwi_p (off)
 796 /* Prohibit value-numbering zero-offset components
 797 of addresses as the same before the pass folding
 798 __builtin_object_size has had a chance to run
 799 (checking cfun->after_inlining does the
 800 trick here). */
801 && (TREE_CODE (orig) != ADDR_EXPR
802 || off != 0
803 || cfun->after_inlining))
804 temp.off = off.to_shwi ();
808 break;
809 case ARRAY_RANGE_REF:
810 case ARRAY_REF:
812 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
813 /* Record index as operand. */
814 temp.op0 = TREE_OPERAND (ref, 1);
815 /* Always record lower bounds and element size. */
816 temp.op1 = array_ref_low_bound (ref);
817 /* But record element size in units of the type alignment. */
818 temp.op2 = TREE_OPERAND (ref, 3);
819 temp.align = eltype->type_common.align;
820 if (! temp.op2)
821 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
822 size_int (TYPE_ALIGN_UNIT (eltype)));
823 if (TREE_CODE (temp.op0) == INTEGER_CST
824 && TREE_CODE (temp.op1) == INTEGER_CST
825 && TREE_CODE (temp.op2) == INTEGER_CST)
827 offset_int off = ((wi::to_offset (temp.op0)
828 - wi::to_offset (temp.op1))
829 * wi::to_offset (temp.op2)
830 * vn_ref_op_align_unit (&temp));
831 if (wi::fits_shwi_p (off))
832 temp.off = off.to_shwi();
835 break;
836 case VAR_DECL:
837 if (DECL_HARD_REGISTER (ref))
839 temp.op0 = ref;
840 break;
842 /* Fallthru. */
843 case PARM_DECL:
844 case CONST_DECL:
845 case RESULT_DECL:
846 /* Canonicalize decls to MEM[&decl] which is what we end up with
847 when valueizing MEM[ptr] with ptr = &decl. */
848 temp.opcode = MEM_REF;
849 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
850 temp.off = 0;
851 result->safe_push (temp);
852 temp.opcode = ADDR_EXPR;
853 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
854 temp.type = TREE_TYPE (temp.op0);
855 temp.off = -1;
856 break;
857 case STRING_CST:
858 case INTEGER_CST:
859 case COMPLEX_CST:
860 case VECTOR_CST:
861 case REAL_CST:
862 case FIXED_CST:
863 case CONSTRUCTOR:
864 case SSA_NAME:
865 temp.op0 = ref;
866 break;
867 case ADDR_EXPR:
868 if (is_gimple_min_invariant (ref))
870 temp.op0 = ref;
871 break;
873 break;
 874 /* These are only interesting for their operands, their
 875 existence, and their type. They will never be the last
 876 ref in the chain of references (i.e. they require an
 877 operand), so we don't have to put anything
 878 for op* as it will be handled by the iteration. */
879 case REALPART_EXPR:
880 temp.off = 0;
881 break;
882 case VIEW_CONVERT_EXPR:
883 temp.off = 0;
884 temp.reverse = storage_order_barrier_p (ref);
885 break;
886 case IMAGPART_EXPR:
887 /* This is only interesting for its constant offset. */
888 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
889 break;
890 default:
891 gcc_unreachable ();
893 result->safe_push (temp);
895 if (REFERENCE_CLASS_P (ref)
896 || TREE_CODE (ref) == MODIFY_EXPR
897 || TREE_CODE (ref) == WITH_SIZE_EXPR
898 || (TREE_CODE (ref) == ADDR_EXPR
899 && !is_gimple_min_invariant (ref)))
900 ref = TREE_OPERAND (ref, 0);
901 else
902 ref = NULL_TREE;
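
/* Decomposition sketch (illustrative): for a load from x.f[i_1], where x is
   an ordinary VAR_DECL, the loop above produces roughly

     { ARRAY_REF      op0 = i_1, op1 = low bound, op2 = scaled element size,
       COMPONENT_REF  op0 = FIELD_DECL f,
       MEM_REF        op0 = 0,
       ADDR_EXPR      op0 = &x }

   i.e. outermost component first, with the decl canonicalized to MEM[&x]
   as described in the VAR_DECL/PARM_DECL case.  */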
 906 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
907 operands in *OPS, the reference alias set SET and the reference type TYPE.
908 Return true if something useful was produced. */
910 bool
911 ao_ref_init_from_vn_reference (ao_ref *ref,
912 alias_set_type set, tree type,
913 vec<vn_reference_op_s> ops)
915 vn_reference_op_t op;
916 unsigned i;
917 tree base = NULL_TREE;
918 tree *op0_p = &base;
919 offset_int offset = 0;
920 offset_int max_size;
921 offset_int size = -1;
922 tree size_tree = NULL_TREE;
923 alias_set_type base_alias_set = -1;
925 /* First get the final access size from just the outermost expression. */
926 op = &ops[0];
927 if (op->opcode == COMPONENT_REF)
928 size_tree = DECL_SIZE (op->op0);
929 else if (op->opcode == BIT_FIELD_REF)
930 size_tree = op->op0;
931 else
933 machine_mode mode = TYPE_MODE (type);
934 if (mode == BLKmode)
935 size_tree = TYPE_SIZE (type);
936 else
937 size = int (GET_MODE_BITSIZE (mode));
939 if (size_tree != NULL_TREE
940 && TREE_CODE (size_tree) == INTEGER_CST)
941 size = wi::to_offset (size_tree);
943 /* Initially, maxsize is the same as the accessed element size.
944 In the following it will only grow (or become -1). */
945 max_size = size;
947 /* Compute cumulative bit-offset for nested component-refs and array-refs,
948 and find the ultimate containing object. */
949 FOR_EACH_VEC_ELT (ops, i, op)
951 switch (op->opcode)
953 /* These may be in the reference ops, but we cannot do anything
954 sensible with them here. */
955 case ADDR_EXPR:
956 /* Apart from ADDR_EXPR arguments to MEM_REF. */
957 if (base != NULL_TREE
958 && TREE_CODE (base) == MEM_REF
959 && op->op0
960 && DECL_P (TREE_OPERAND (op->op0, 0)))
962 vn_reference_op_t pop = &ops[i-1];
963 base = TREE_OPERAND (op->op0, 0);
964 if (pop->off == -1)
966 max_size = -1;
967 offset = 0;
969 else
970 offset += pop->off * BITS_PER_UNIT;
971 op0_p = NULL;
972 break;
974 /* Fallthru. */
975 case CALL_EXPR:
976 return false;
978 /* Record the base objects. */
979 case MEM_REF:
980 base_alias_set = get_deref_alias_set (op->op0);
981 *op0_p = build2 (MEM_REF, op->type,
982 NULL_TREE, op->op0);
983 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
984 MR_DEPENDENCE_BASE (*op0_p) = op->base;
985 op0_p = &TREE_OPERAND (*op0_p, 0);
986 break;
988 case VAR_DECL:
989 case PARM_DECL:
990 case RESULT_DECL:
991 case SSA_NAME:
992 *op0_p = op->op0;
993 op0_p = NULL;
994 break;
996 /* And now the usual component-reference style ops. */
997 case BIT_FIELD_REF:
998 offset += wi::to_offset (op->op1);
999 break;
1001 case COMPONENT_REF:
1003 tree field = op->op0;
1004 /* We do not have a complete COMPONENT_REF tree here so we
1005 cannot use component_ref_field_offset. Do the interesting
1006 parts manually. */
1007 tree this_offset = DECL_FIELD_OFFSET (field);
1009 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1010 max_size = -1;
1011 else
1013 offset_int woffset = (wi::to_offset (this_offset)
1014 << LOG2_BITS_PER_UNIT);
1015 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1016 offset += woffset;
1018 break;
1021 case ARRAY_RANGE_REF:
1022 case ARRAY_REF:
1023 /* We recorded the lower bound and the element size. */
1024 if (TREE_CODE (op->op0) != INTEGER_CST
1025 || TREE_CODE (op->op1) != INTEGER_CST
1026 || TREE_CODE (op->op2) != INTEGER_CST)
1027 max_size = -1;
1028 else
1030 offset_int woffset
1031 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1032 TYPE_PRECISION (TREE_TYPE (op->op0)));
1033 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1034 woffset <<= LOG2_BITS_PER_UNIT;
1035 offset += woffset;
1037 break;
1039 case REALPART_EXPR:
1040 break;
1042 case IMAGPART_EXPR:
1043 offset += size;
1044 break;
1046 case VIEW_CONVERT_EXPR:
1047 break;
1049 case STRING_CST:
1050 case INTEGER_CST:
1051 case COMPLEX_CST:
1052 case VECTOR_CST:
1053 case REAL_CST:
1054 case CONSTRUCTOR:
1055 case CONST_DECL:
1056 return false;
1058 default:
1059 return false;
1063 if (base == NULL_TREE)
1064 return false;
1066 ref->ref = NULL_TREE;
1067 ref->base = base;
1068 ref->ref_alias_set = set;
1069 if (base_alias_set != -1)
1070 ref->base_alias_set = base_alias_set;
1071 else
1072 ref->base_alias_set = get_alias_set (base);
1073 /* We discount volatiles from value-numbering elsewhere. */
1074 ref->volatile_p = false;
1076 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1078 ref->offset = 0;
1079 ref->size = -1;
1080 ref->max_size = -1;
1081 return true;
1084 ref->size = size.to_shwi ();
1086 if (!wi::fits_shwi_p (offset))
1088 ref->offset = 0;
1089 ref->max_size = -1;
1090 return true;
1093 ref->offset = offset.to_shwi ();
1095 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1096 ref->max_size = -1;
1097 else
1098 ref->max_size = max_size.to_shwi ();
1100 return true;
1103 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1104 vn_reference_op_s's. */
1106 static void
1107 copy_reference_ops_from_call (gcall *call,
1108 vec<vn_reference_op_s> *result)
1110 vn_reference_op_s temp;
1111 unsigned i;
1112 tree lhs = gimple_call_lhs (call);
1113 int lr;
 1115 /* If two calls have a different non-SSA LHS, vdef value numbers should be
 1116 different. By adding the LHS to the vector here, we ensure that the
 1117 hashcode is different, guaranteeing a different value number. */
1118 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1120 memset (&temp, 0, sizeof (temp));
1121 temp.opcode = MODIFY_EXPR;
1122 temp.type = TREE_TYPE (lhs);
1123 temp.op0 = lhs;
1124 temp.off = -1;
1125 result->safe_push (temp);
1128 /* Copy the type, opcode, function, static chain and EH region, if any. */
1129 memset (&temp, 0, sizeof (temp));
1130 temp.type = gimple_call_return_type (call);
1131 temp.opcode = CALL_EXPR;
1132 temp.op0 = gimple_call_fn (call);
1133 temp.op1 = gimple_call_chain (call);
1134 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1135 temp.op2 = size_int (lr);
1136 temp.off = -1;
1137 if (gimple_call_with_bounds_p (call))
1138 temp.with_bounds = 1;
1139 result->safe_push (temp);
1141 /* Copy the call arguments. As they can be references as well,
1142 just chain them together. */
1143 for (i = 0; i < gimple_call_num_args (call); ++i)
1145 tree callarg = gimple_call_arg (call, i);
1146 copy_reference_ops_from_ref (callarg, result);
1150 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1151 *I_P to point to the last element of the replacement. */
1152 static bool
1153 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1154 unsigned int *i_p)
1156 unsigned int i = *i_p;
1157 vn_reference_op_t op = &(*ops)[i];
1158 vn_reference_op_t mem_op = &(*ops)[i - 1];
1159 tree addr_base;
1160 HOST_WIDE_INT addr_offset = 0;
 1162 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
 1163 of .foo.bar to the preceding MEM_REF offset and replace the
 1164 address with &OBJ. */
1165 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1166 &addr_offset);
1167 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1168 if (addr_base != TREE_OPERAND (op->op0, 0))
1170 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1171 off += addr_offset;
1172 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1173 op->op0 = build_fold_addr_expr (addr_base);
1174 if (tree_fits_shwi_p (mem_op->op0))
1175 mem_op->off = tree_to_shwi (mem_op->op0);
1176 else
1177 mem_op->off = -1;
1178 return true;
1180 return false;
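
/* Worked example (illustrative): if the vector contains MEM_REF (op0 = 8)
   followed by ADDR_EXPR (op0 = &s.f) and field f sits at byte offset 4,
   the code above rewrites the pair to MEM_REF (op0 = 12) and
   ADDR_EXPR (op0 = &s); that is, MEM[&s.f + 8] becomes MEM[&s + 12].  */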
1183 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1184 *I_P to point to the last element of the replacement. */
1185 static bool
1186 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1187 unsigned int *i_p)
1189 unsigned int i = *i_p;
1190 vn_reference_op_t op = &(*ops)[i];
1191 vn_reference_op_t mem_op = &(*ops)[i - 1];
1192 gimple *def_stmt;
1193 enum tree_code code;
1194 offset_int off;
1196 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1197 if (!is_gimple_assign (def_stmt))
1198 return false;
1200 code = gimple_assign_rhs_code (def_stmt);
1201 if (code != ADDR_EXPR
1202 && code != POINTER_PLUS_EXPR)
1203 return false;
1205 off = offset_int::from (mem_op->op0, SIGNED);
 1207 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
 1208 of .foo.bar to the preceding MEM_REF offset and replace the
 1209 address with &OBJ. */
1210 if (code == ADDR_EXPR)
1212 tree addr, addr_base;
1213 HOST_WIDE_INT addr_offset;
1215 addr = gimple_assign_rhs1 (def_stmt);
1216 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1217 &addr_offset);
 1218 /* If that didn't work because the address isn't invariant, propagate
 1219 the reference tree from the address operation in case the current
 1220 dereference isn't offsetted. */
1221 if (!addr_base
1222 && *i_p == ops->length () - 1
1223 && off == 0
 1224 /* This makes us disable this transform for PRE, where the
 1225 reference ops might also be used for code insertion, which
 1226 would be invalid. */
1227 && default_vn_walk_kind == VN_WALKREWRITE)
1229 auto_vec<vn_reference_op_s, 32> tem;
1230 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1231 /* Make sure to preserve TBAA info. The only objects not
1232 wrapped in MEM_REFs that can have their address taken are
1233 STRING_CSTs. */
1234 if (tem.length () >= 2
1235 && tem[tem.length () - 2].opcode == MEM_REF)
1237 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1238 new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1239 new_mem_op->op0);
1241 else
1242 gcc_assert (tem.last ().opcode == STRING_CST);
1243 ops->pop ();
1244 ops->pop ();
1245 ops->safe_splice (tem);
1246 --*i_p;
1247 return true;
1249 if (!addr_base
1250 || TREE_CODE (addr_base) != MEM_REF)
1251 return false;
1253 off += addr_offset;
1254 off += mem_ref_offset (addr_base);
1255 op->op0 = TREE_OPERAND (addr_base, 0);
1257 else
1259 tree ptr, ptroff;
1260 ptr = gimple_assign_rhs1 (def_stmt);
1261 ptroff = gimple_assign_rhs2 (def_stmt);
1262 if (TREE_CODE (ptr) != SSA_NAME
1263 || TREE_CODE (ptroff) != INTEGER_CST)
1264 return false;
1266 off += wi::to_offset (ptroff);
1267 op->op0 = ptr;
1270 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1271 if (tree_fits_shwi_p (mem_op->op0))
1272 mem_op->off = tree_to_shwi (mem_op->op0);
1273 else
1274 mem_op->off = -1;
1275 if (TREE_CODE (op->op0) == SSA_NAME)
1276 op->op0 = SSA_VAL (op->op0);
1277 if (TREE_CODE (op->op0) != SSA_NAME)
1278 op->opcode = TREE_CODE (op->op0);
1280 /* And recurse. */
1281 if (TREE_CODE (op->op0) == SSA_NAME)
1282 vn_reference_maybe_forwprop_address (ops, i_p);
1283 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1284 vn_reference_fold_indirect (ops, i_p);
1285 return true;
1288 /* Optimize the reference REF to a constant if possible or return
1289 NULL_TREE if not. */
1291 tree
1292 fully_constant_vn_reference_p (vn_reference_t ref)
1294 vec<vn_reference_op_s> operands = ref->operands;
1295 vn_reference_op_t op;
1297 /* Try to simplify the translated expression if it is
1298 a call to a builtin function with at most two arguments. */
1299 op = &operands[0];
1300 if (op->opcode == CALL_EXPR
1301 && TREE_CODE (op->op0) == ADDR_EXPR
1302 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1303 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1304 && operands.length () >= 2
1305 && operands.length () <= 3)
1307 vn_reference_op_t arg0, arg1 = NULL;
1308 bool anyconst = false;
1309 arg0 = &operands[1];
1310 if (operands.length () > 2)
1311 arg1 = &operands[2];
1312 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1313 || (arg0->opcode == ADDR_EXPR
1314 && is_gimple_min_invariant (arg0->op0)))
1315 anyconst = true;
1316 if (arg1
1317 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1318 || (arg1->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg1->op0))))
1320 anyconst = true;
1321 if (anyconst)
1323 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1324 arg1 ? 2 : 1,
1325 arg0->op0,
1326 arg1 ? arg1->op0 : NULL);
1327 if (folded
1328 && TREE_CODE (folded) == NOP_EXPR)
1329 folded = TREE_OPERAND (folded, 0);
1330 if (folded
1331 && is_gimple_min_invariant (folded))
1332 return folded;
1336 /* Simplify reads from constants or constant initializers. */
1337 else if (BITS_PER_UNIT == 8
1338 && is_gimple_reg_type (ref->type)
1339 && (!INTEGRAL_TYPE_P (ref->type)
1340 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1342 HOST_WIDE_INT off = 0;
1343 HOST_WIDE_INT size;
1344 if (INTEGRAL_TYPE_P (ref->type))
1345 size = TYPE_PRECISION (ref->type);
1346 else
1347 size = tree_to_shwi (TYPE_SIZE (ref->type));
1348 if (size % BITS_PER_UNIT != 0
1349 || size > MAX_BITSIZE_MODE_ANY_MODE)
1350 return NULL_TREE;
1351 size /= BITS_PER_UNIT;
1352 unsigned i;
1353 for (i = 0; i < operands.length (); ++i)
1355 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1357 ++i;
1358 break;
1360 if (operands[i].off == -1)
1361 return NULL_TREE;
1362 off += operands[i].off;
1363 if (operands[i].opcode == MEM_REF)
1365 ++i;
1366 break;
1369 vn_reference_op_t base = &operands[--i];
1370 tree ctor = error_mark_node;
1371 tree decl = NULL_TREE;
1372 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1373 ctor = base->op0;
1374 else if (base->opcode == MEM_REF
1375 && base[1].opcode == ADDR_EXPR
1376 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1377 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1379 decl = TREE_OPERAND (base[1].op0, 0);
1380 ctor = ctor_for_folding (decl);
1382 if (ctor == NULL_TREE)
1383 return build_zero_cst (ref->type);
1384 else if (ctor != error_mark_node)
1386 if (decl)
1388 tree res = fold_ctor_reference (ref->type, ctor,
1389 off * BITS_PER_UNIT,
1390 size * BITS_PER_UNIT, decl);
1391 if (res)
1393 STRIP_USELESS_TYPE_CONVERSION (res);
1394 if (is_gimple_min_invariant (res))
1395 return res;
1398 else
1400 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1401 int len = native_encode_expr (ctor, buf, size, off);
1402 if (len > 0)
1403 return native_interpret_expr (ref->type, buf, len);
1408 return NULL_TREE;
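
/* Illustrative example of the constant-initializer path above, assuming a
   read-only variable with a known constructor:

     static const int tab[2] = { 1, 2 };
     ... = tab[1];

   The operand walk reaches the decl's ctor via ctor_for_folding, and
   fold_ctor_reference (or the native encode/interpret fallback) recovers
   the constant 2, which is returned in place of the load.  */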
1411 /* Return true if OPS contain a storage order barrier. */
1413 static bool
1414 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1416 vn_reference_op_t op;
1417 unsigned i;
1419 FOR_EACH_VEC_ELT (ops, i, op)
1420 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1421 return true;
1423 return false;
 1426 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1427 structures into their value numbers. This is done in-place, and
1428 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1429 whether any operands were valueized. */
1431 static vec<vn_reference_op_s>
1432 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1434 vn_reference_op_t vro;
1435 unsigned int i;
1437 *valueized_anything = false;
1439 FOR_EACH_VEC_ELT (orig, i, vro)
1441 if (vro->opcode == SSA_NAME
1442 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1444 tree tem = SSA_VAL (vro->op0);
1445 if (tem != vro->op0)
1447 *valueized_anything = true;
1448 vro->op0 = tem;
1450 /* If it transforms from an SSA_NAME to a constant, update
1451 the opcode. */
1452 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1453 vro->opcode = TREE_CODE (vro->op0);
1455 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1457 tree tem = SSA_VAL (vro->op1);
1458 if (tem != vro->op1)
1460 *valueized_anything = true;
1461 vro->op1 = tem;
1464 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1466 tree tem = SSA_VAL (vro->op2);
1467 if (tem != vro->op2)
1469 *valueized_anything = true;
1470 vro->op2 = tem;
1473 /* If it transforms from an SSA_NAME to an address, fold with
1474 a preceding indirect reference. */
1475 if (i > 0
1476 && vro->op0
1477 && TREE_CODE (vro->op0) == ADDR_EXPR
1478 && orig[i - 1].opcode == MEM_REF)
1480 if (vn_reference_fold_indirect (&orig, &i))
1481 *valueized_anything = true;
1483 else if (i > 0
1484 && vro->opcode == SSA_NAME
1485 && orig[i - 1].opcode == MEM_REF)
1487 if (vn_reference_maybe_forwprop_address (&orig, &i))
1488 *valueized_anything = true;
1490 /* If it transforms a non-constant ARRAY_REF into a constant
1491 one, adjust the constant offset. */
1492 else if (vro->opcode == ARRAY_REF
1493 && vro->off == -1
1494 && TREE_CODE (vro->op0) == INTEGER_CST
1495 && TREE_CODE (vro->op1) == INTEGER_CST
1496 && TREE_CODE (vro->op2) == INTEGER_CST)
1498 offset_int off = ((wi::to_offset (vro->op0)
1499 - wi::to_offset (vro->op1))
1500 * wi::to_offset (vro->op2)
1501 * vn_ref_op_align_unit (vro));
1502 if (wi::fits_shwi_p (off))
1503 vro->off = off.to_shwi ();
1507 return orig;
1510 static vec<vn_reference_op_s>
1511 valueize_refs (vec<vn_reference_op_s> orig)
1513 bool tem;
1514 return valueize_refs_1 (orig, &tem);
1517 static vec<vn_reference_op_s> shared_lookup_references;
1519 /* Create a vector of vn_reference_op_s structures from REF, a
1520 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1521 this function. *VALUEIZED_ANYTHING will specify whether any
1522 operands were valueized. */
1524 static vec<vn_reference_op_s>
1525 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1527 if (!ref)
1528 return vNULL;
1529 shared_lookup_references.truncate (0);
1530 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1531 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1532 valueized_anything);
1533 return shared_lookup_references;
1536 /* Create a vector of vn_reference_op_s structures from CALL, a
1537 call statement. The vector is shared among all callers of
1538 this function. */
1540 static vec<vn_reference_op_s>
1541 valueize_shared_reference_ops_from_call (gcall *call)
1543 if (!call)
1544 return vNULL;
1545 shared_lookup_references.truncate (0);
1546 copy_reference_ops_from_call (call, &shared_lookup_references);
1547 shared_lookup_references = valueize_refs (shared_lookup_references);
1548 return shared_lookup_references;
1551 /* Lookup a SCCVN reference operation VR in the current hash table.
1552 Returns the resulting value number if it exists in the hash table,
1553 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1554 vn_reference_t stored in the hashtable if something is found. */
1556 static tree
1557 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1559 vn_reference_s **slot;
1560 hashval_t hash;
1562 hash = vr->hashcode;
1563 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1564 if (!slot && current_info == optimistic_info)
1565 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1566 if (slot)
1568 if (vnresult)
1569 *vnresult = (vn_reference_t)*slot;
1570 return ((vn_reference_t)*slot)->result;
1573 return NULL_TREE;
1576 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1577 with the current VUSE and performs the expression lookup. */
1579 static void *
1580 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1581 unsigned int cnt, void *vr_)
1583 vn_reference_t vr = (vn_reference_t)vr_;
1584 vn_reference_s **slot;
1585 hashval_t hash;
1587 /* This bounds the stmt walks we perform on reference lookups
1588 to O(1) instead of O(N) where N is the number of dominating
1589 stores. */
1590 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1591 return (void *)-1;
1593 if (last_vuse_ptr)
1594 *last_vuse_ptr = vuse;
1596 /* Fixup vuse and hash. */
1597 if (vr->vuse)
1598 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1599 vr->vuse = vuse_ssa_val (vuse);
1600 if (vr->vuse)
1601 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1603 hash = vr->hashcode;
1604 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1605 if (!slot && current_info == optimistic_info)
1606 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1607 if (slot)
1608 return *slot;
1610 return NULL;
 1613 /* Look up an existing or insert a new vn_reference entry into the
 1614 value table for the VUSE, SET, TYPE, OPERANDS reference that
 1615 has the value VALUE, which is either a constant or an SSA name. */
1617 static vn_reference_t
1618 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1619 alias_set_type set,
1620 tree type,
1621 vec<vn_reference_op_s,
1622 va_heap> operands,
1623 tree value)
1625 vn_reference_s vr1;
1626 vn_reference_t result;
1627 unsigned value_id;
1628 vr1.vuse = vuse;
1629 vr1.operands = operands;
1630 vr1.type = type;
1631 vr1.set = set;
1632 vr1.hashcode = vn_reference_compute_hash (&vr1);
1633 if (vn_reference_lookup_1 (&vr1, &result))
1634 return result;
1635 if (TREE_CODE (value) == SSA_NAME)
1636 value_id = VN_INFO (value)->value_id;
1637 else
1638 value_id = get_or_alloc_constant_value_id (value);
1639 return vn_reference_insert_pieces (vuse, set, type,
1640 operands.copy (), value, value_id);
1643 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1644 static unsigned mprts_hook_cnt;
 1646 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables. */
1648 static tree
1649 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1651 if (!rcode.is_tree_code ())
1652 return NULL_TREE;
1653 tree *ops = ops_;
1654 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1655 if (rcode == CONSTRUCTOR
 1656 /* ??? We're arriving here with SCCVN's view (a decomposed CONSTRUCTOR)
 1657 and GIMPLE's / match-and-simplify's view (CONSTRUCTOR as a GENERIC tree). */
1658 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1660 length = CONSTRUCTOR_NELTS (ops_[0]);
1661 ops = XALLOCAVEC (tree, length);
1662 for (unsigned i = 0; i < length; ++i)
1663 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1665 vn_nary_op_t vnresult = NULL;
1666 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1667 type, ops, &vnresult);
1668 /* We can end up endlessly recursing simplifications if the lookup above
1669 presents us with a def-use chain that mirrors the original simplification.
1670 See PR80887 for an example. Limit successful lookup artificially
1671 to 10 times if we are called as mprts_hook. */
1672 if (res
1673 && mprts_hook
1674 && --mprts_hook_cnt == 0)
1676 if (dump_file && (dump_flags & TDF_DETAILS))
1677 fprintf (dump_file, "Resetting mprts_hook after too many "
1678 "invocations.\n");
1679 mprts_hook = NULL;
1681 return res;
1684 /* Return a value-number for RCODE OPS... either by looking up an existing
1685 value-number for the simplified result or by inserting the operation if
1686 INSERT is true. */
1688 static tree
1689 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1690 bool insert)
1692 tree result = NULL_TREE;
1693 /* We will be creating a value number for
1694 RCODE (OPS...).
1695 So first simplify and lookup this expression to see if it
1696 is already available. */
1697 mprts_hook = vn_lookup_simplify_result;
1698 mprts_hook_cnt = 9;
1699 bool res = false;
1700 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1702 case 1:
1703 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1704 break;
1705 case 2:
1706 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1707 break;
1708 case 3:
1709 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1710 break;
1712 mprts_hook = NULL;
1713 gimple *new_stmt = NULL;
1714 if (res
1715 && gimple_simplified_result_is_gimple_val (rcode, ops))
1716 /* The expression is already available. */
1717 result = ops[0];
1718 else
1720 tree val = vn_lookup_simplify_result (rcode, type, ops);
1721 if (!val && insert)
1723 gimple_seq stmts = NULL;
1724 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1725 if (result)
1727 gcc_assert (gimple_seq_singleton_p (stmts));
1728 new_stmt = gimple_seq_first_stmt (stmts);
1731 else
1732 /* The expression is already available. */
1733 result = val;
1735 if (new_stmt)
1737 /* The expression is not yet available, value-number lhs to
1738 the new SSA_NAME we created. */
1739 /* Initialize value-number information properly. */
1740 VN_INFO_GET (result)->valnum = result;
1741 VN_INFO (result)->value_id = get_next_value_id ();
1742 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1743 new_stmt);
1744 VN_INFO (result)->needs_insertion = true;
1745 /* ??? PRE phi-translation inserts NARYs without corresponding
1746 SSA name result. Re-use those but set their result according
1747 to the stmt we just built. */
1748 vn_nary_op_t nary = NULL;
1749 vn_nary_op_lookup_stmt (new_stmt, &nary);
1750 if (nary)
1752 gcc_assert (nary->result == NULL_TREE);
1753 nary->result = gimple_assign_lhs (new_stmt);
1755 /* As all "inserted" statements are singleton SCCs, insert
1756 to the valid table. This is strictly needed to
1757 avoid re-generating new value SSA_NAMEs for the same
1758 expression during SCC iteration over and over (the
1759 optimistic table gets cleared after each iteration).
1760 We do not need to insert into the optimistic table, as
1761 lookups there will fall back to the valid table. */
1762 else if (current_info == optimistic_info)
1764 current_info = valid_info;
1765 vn_nary_op_insert_stmt (new_stmt, result);
1766 current_info = optimistic_info;
1768 else
1769 vn_nary_op_insert_stmt (new_stmt, result);
1770 if (dump_file && (dump_flags & TDF_DETAILS))
1772 fprintf (dump_file, "Inserting name ");
1773 print_generic_expr (dump_file, result);
1774 fprintf (dump_file, " for expression ");
1775 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1776 fprintf (dump_file, "\n");
1779 return result;
1782 /* Return a value-number for RCODE OPS... either by looking up an existing
1783 value-number for the simplified result or by inserting the operation. */
1785 static tree
1786 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1788 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
 1791 /* Try to simplify the n-ary expression NARY and return
 1792 its value if present. */
1794 tree
1795 vn_nary_simplify (vn_nary_op_t nary)
1797 if (nary->length > 3)
1798 return NULL_TREE;
1799 tree ops[3];
1800 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1801 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1805 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1806 from the statement defining VUSE and if not successful tries to
1807 translate *REFP and VR_ through an aggregate copy at the definition
1808 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1809 of *REF and *VR. If only disambiguation was performed then
1810 *DISAMBIGUATE_ONLY is set to true. */
1812 static void *
1813 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1814 bool *disambiguate_only)
1816 vn_reference_t vr = (vn_reference_t)vr_;
1817 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1818 tree base = ao_ref_base (ref);
1819 HOST_WIDE_INT offset, maxsize;
1820 static vec<vn_reference_op_s> lhs_ops;
1821 ao_ref lhs_ref;
1822 bool lhs_ref_ok = false;
 1824 /* If the reference is based on a parameter that was determined to be
 1825 pointing to readonly memory, it doesn't change. */
1826 if (TREE_CODE (base) == MEM_REF
1827 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1828 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1829 && bitmap_bit_p (const_parms,
1830 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1832 *disambiguate_only = true;
1833 return NULL;
 1836 /* First try to disambiguate after value-replacing in the definition's LHS. */
1837 if (is_gimple_assign (def_stmt))
1839 tree lhs = gimple_assign_lhs (def_stmt);
1840 bool valueized_anything = false;
1841 /* Avoid re-allocation overhead. */
1842 lhs_ops.truncate (0);
1843 copy_reference_ops_from_ref (lhs, &lhs_ops);
1844 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1845 if (valueized_anything)
1847 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1848 get_alias_set (lhs),
1849 TREE_TYPE (lhs), lhs_ops);
1850 if (lhs_ref_ok
1851 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1853 *disambiguate_only = true;
1854 return NULL;
1857 else
1859 ao_ref_init (&lhs_ref, lhs);
1860 lhs_ref_ok = true;
1863 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1864 && gimple_call_num_args (def_stmt) <= 4)
 1866 /* For builtin calls, valueize their arguments and call the
 1867 alias oracle again. Valueization may improve points-to
 1868 info of pointers and constify size and position arguments.
 1869 Originally this was motivated by PR61034, which has
 1870 conditional calls to free falsely clobbering ref because
 1871 of imprecise points-to info of the argument. */
1872 tree oldargs[4];
1873 bool valueized_anything = false;
1874 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1876 oldargs[i] = gimple_call_arg (def_stmt, i);
1877 tree val = vn_valueize (oldargs[i]);
1878 if (val != oldargs[i])
1880 gimple_call_set_arg (def_stmt, i, val);
1881 valueized_anything = true;
1884 if (valueized_anything)
1886 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1887 ref);
1888 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1889 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1890 if (!res)
1892 *disambiguate_only = true;
1893 return NULL;
1898 if (*disambiguate_only)
1899 return (void *)-1;
1901 offset = ref->offset;
1902 maxsize = ref->max_size;
 1904 /* If we cannot constrain the size of the reference, we cannot
 1905 test if anything kills it. */
1906 if (maxsize == -1)
1907 return (void *)-1;
1909 /* We can't deduce anything useful from clobbers. */
1910 if (gimple_clobber_p (def_stmt))
1911 return (void *)-1;
1913 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1914 from that definition.
1915 1) Memset. */
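/* For example (illustrative): after memset (&a, 0, sizeof (a)); a covered
   read x_1 = a.f; is value-numbered to a zero constant of its type via the
   lookup-or-insert below.  */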
1916 if (is_gimple_reg_type (vr->type)
1917 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1918 && integer_zerop (gimple_call_arg (def_stmt, 1))
1919 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1920 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1922 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1923 tree base2;
1924 HOST_WIDE_INT offset2, size2, maxsize2;
1925 bool reverse;
1926 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1927 &reverse);
1928 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1929 if ((unsigned HOST_WIDE_INT)size2 / 8
1930 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1931 && maxsize2 != -1
1932 && operand_equal_p (base, base2, 0)
1933 && offset2 <= offset
1934 && offset2 + size2 >= offset + maxsize)
1936 tree val = build_zero_cst (vr->type);
1937 return vn_reference_lookup_or_insert_for_pieces
1938 (vuse, vr->set, vr->type, vr->operands, val);
1942 /* 2) Assignment from an empty CONSTRUCTOR. */
1943 else if (is_gimple_reg_type (vr->type)
1944 && gimple_assign_single_p (def_stmt)
1945 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1946 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1948 tree base2;
1949 HOST_WIDE_INT offset2, size2, maxsize2;
1950 bool reverse;
1951 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1952 &offset2, &size2, &maxsize2, &reverse);
1953 if (maxsize2 != -1
1954 && operand_equal_p (base, base2, 0)
1955 && offset2 <= offset
1956 && offset2 + size2 >= offset + maxsize)
1958 tree val = build_zero_cst (vr->type);
1959 return vn_reference_lookup_or_insert_for_pieces
1960 (vuse, vr->set, vr->type, vr->operands, val);
 1964 /* 3) Assignment from a constant. We can use fold's native encode/interpret
 1965 routines to extract the assigned bits. */
1966 else if (ref->size == maxsize
1967 && is_gimple_reg_type (vr->type)
1968 && !contains_storage_order_barrier_p (vr->operands)
1969 && gimple_assign_single_p (def_stmt)
1970 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1971 && maxsize % BITS_PER_UNIT == 0
1972 && offset % BITS_PER_UNIT == 0
1973 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1974 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1975 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1977 tree base2;
1978 HOST_WIDE_INT offset2, size2, maxsize2;
1979 bool reverse;
1980 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1981 &offset2, &size2, &maxsize2, &reverse);
1982 if (!reverse
1983 && maxsize2 != -1
1984 && maxsize2 == size2
1985 && size2 % BITS_PER_UNIT == 0
1986 && offset2 % BITS_PER_UNIT == 0
1987 && operand_equal_p (base, base2, 0)
1988 && offset2 <= offset
1989 && offset2 + size2 >= offset + maxsize)
1991 /* We support up to 512-bit values (for V8DFmode). */
1992 unsigned char buffer[64];
1993 int len;
1995 tree rhs = gimple_assign_rhs1 (def_stmt);
1996 if (TREE_CODE (rhs) == SSA_NAME)
1997 rhs = SSA_VAL (rhs);
1998 len = native_encode_expr (rhs,
1999 buffer, sizeof (buffer));
2000 if (len > 0)
2002 tree type = vr->type;
2003 /* Make sure to interpret in a type that has a range
2004 covering the whole access size. */
2005 if (INTEGRAL_TYPE_P (vr->type)
2006 && ref->size != TYPE_PRECISION (vr->type))
2007 type = build_nonstandard_integer_type (ref->size,
2008 TYPE_UNSIGNED (type));
2009 tree val = native_interpret_expr (type,
2010 buffer
2011 + ((offset - offset2)
2012 / BITS_PER_UNIT),
2013 ref->size / BITS_PER_UNIT);
2014 /* If we chop off bits because the type's precision doesn't
2015 match the memory access size, this is ok when optimizing
2016 reads but not when called from the DSE code during
2017 elimination. */
2018 if (val
2019 && type != vr->type)
2021 if (! int_fits_type_p (val, vr->type))
2022 val = NULL_TREE;
2023 else
2024 val = fold_convert (vr->type, val);
2027 if (val)
2028 return vn_reference_lookup_or_insert_for_pieces
2029 (vuse, vr->set, vr->type, vr->operands, val);
2034 /* 4) Assignment from an SSA name whose definition we may be able
2035 to access pieces from. */
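/* Illustrative sketch (hypothetical GIMPLE): after a store of an SSA
   name, e.g. a vector or complex value v_2, a covered component read
   is looked up (or built) as  BIT_FIELD_REF <v_2, size, offset>.  */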
2036 else if (ref->size == maxsize
2037 && is_gimple_reg_type (vr->type)
2038 && !contains_storage_order_barrier_p (vr->operands)
2039 && gimple_assign_single_p (def_stmt)
2040 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2042 tree base2;
2043 HOST_WIDE_INT offset2, size2, maxsize2;
2044 bool reverse;
2045 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2046 &offset2, &size2, &maxsize2,
2047 &reverse);
2048 if (!reverse
2049 && maxsize2 != -1
2050 && maxsize2 == size2
2051 && operand_equal_p (base, base2, 0)
2052 && offset2 <= offset
2053 && offset2 + size2 >= offset + maxsize
2054 /* ??? We can't handle bitfield precision extracts without
2055 either using an alternate type for the BIT_FIELD_REF and
2056 then doing a conversion or possibly adjusting the offset
2057 according to endianness. */
2058 && (! INTEGRAL_TYPE_P (vr->type)
2059 || ref->size == TYPE_PRECISION (vr->type))
2060 && ref->size % BITS_PER_UNIT == 0)
2062 code_helper rcode = BIT_FIELD_REF;
2063 tree ops[3];
2064 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2065 ops[1] = bitsize_int (ref->size);
2066 ops[2] = bitsize_int (offset - offset2);
2067 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2068 if (val
2069 && (TREE_CODE (val) != SSA_NAME
2070 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2072 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2073 (vuse, vr->set, vr->type, vr->operands, val);
2074 return res;
2079 /* 5) For aggregate copies translate the reference through them if
2080 the copy kills ref. */
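/* Illustrative sketch (hypothetical): given the aggregate copy
     a = b;
   a later lookup of a.x.y is rewritten into b.x.y and the walk
   continues with the translated reference.  */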
2081 else if (vn_walk_kind == VN_WALKREWRITE
2082 && gimple_assign_single_p (def_stmt)
2083 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2084 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2085 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2087 tree base2;
2088 HOST_WIDE_INT maxsize2;
2089 int i, j, k;
2090 auto_vec<vn_reference_op_s> rhs;
2091 vn_reference_op_t vro;
2092 ao_ref r;
2094 if (!lhs_ref_ok)
2095 return (void *)-1;
2097 /* See if the assignment kills REF. */
2098 base2 = ao_ref_base (&lhs_ref);
2099 maxsize2 = lhs_ref.max_size;
2100 if (maxsize2 == -1
2101 || (base != base2
2102 && (TREE_CODE (base) != MEM_REF
2103 || TREE_CODE (base2) != MEM_REF
2104 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2105 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2106 TREE_OPERAND (base2, 1))))
2107 || !stmt_kills_ref_p (def_stmt, ref))
2108 return (void *)-1;
2110 /* Find the common base of ref and the lhs. lhs_ops already
2111 contains valueized operands for the lhs. */
2112 i = vr->operands.length () - 1;
2113 j = lhs_ops.length () - 1;
2114 while (j >= 0 && i >= 0
2115 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2117 i--;
2118 j--;
2121 /* ??? The innermost op should always be a MEM_REF and we already
2122 checked that the assignment to the lhs kills vr. Thus for
2123 aggregate copies using char[] types the vn_reference_op_eq
2124 may fail when comparing types for compatibility. But we really
2125 don't care here - further lookups with the rewritten operands
2126 will simply fail if we messed up types too badly. */
2127 HOST_WIDE_INT extra_off = 0;
2128 if (j == 0 && i >= 0
2129 && lhs_ops[0].opcode == MEM_REF
2130 && lhs_ops[0].off != -1)
2132 if (lhs_ops[0].off == vr->operands[i].off)
2133 i--, j--;
2134 else if (vr->operands[i].opcode == MEM_REF
2135 && vr->operands[i].off != -1)
2137 extra_off = vr->operands[i].off - lhs_ops[0].off;
2138 i--, j--;
2142 /* i now points to the first additional op.
2143 ??? LHS may not be completely contained in VR, one or more
2144 VIEW_CONVERT_EXPRs could be in its way. We could at least
2145 try handling outermost VIEW_CONVERT_EXPRs. */
2146 if (j != -1)
2147 return (void *)-1;
2149 /* Punt if the additional ops contain a storage order barrier. */
2150 for (k = i; k >= 0; k--)
2152 vro = &vr->operands[k];
2153 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2154 return (void *)-1;
2157 /* Now re-write REF to be based on the rhs of the assignment. */
2158 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2160 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2161 if (extra_off != 0)
2163 if (rhs.length () < 2
2164 || rhs[0].opcode != MEM_REF
2165 || rhs[0].off == -1)
2166 return (void *)-1;
2167 rhs[0].off += extra_off;
2168 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2169 build_int_cst (TREE_TYPE (rhs[0].op0),
2170 extra_off));
2173 /* We need to prepend vr->operands[0..i] to rhs. */
2174 vec<vn_reference_op_s> old = vr->operands;
2175 if (i + 1 + rhs.length () > vr->operands.length ())
2176 vr->operands.safe_grow (i + 1 + rhs.length ());
2177 else
2178 vr->operands.truncate (i + 1 + rhs.length ());
2179 FOR_EACH_VEC_ELT (rhs, j, vro)
2180 vr->operands[i + 1 + j] = *vro;
2181 vr->operands = valueize_refs (vr->operands);
2182 if (old == shared_lookup_references)
2183 shared_lookup_references = vr->operands;
2184 vr->hashcode = vn_reference_compute_hash (vr);
2186 /* Try folding the new reference to a constant. */
2187 tree val = fully_constant_vn_reference_p (vr);
2188 if (val)
2189 return vn_reference_lookup_or_insert_for_pieces
2190 (vuse, vr->set, vr->type, vr->operands, val);
2192 /* Adjust *ref from the new operands. */
2193 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2194 return (void *)-1;
2195 /* This can happen with bitfields. */
2196 if (ref->size != r.size)
2197 return (void *)-1;
2198 *ref = r;
2200 /* Do not update last seen VUSE after translating. */
2201 last_vuse_ptr = NULL;
2203 /* Keep looking for the adjusted *REF / VR pair. */
2204 return NULL;
2207 /* 6) For memcpy copies translate the reference through them if
2208 the copy kills ref. */
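/* Illustrative sketch (hypothetical): given
     memcpy (&d, &s, n);
   a scalar read from d that lies entirely within the first n bytes is
   rewritten into a MEM_REF based read from s at the corresponding
   offset, and the walk continues with the translated reference.  */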
2209 else if (vn_walk_kind == VN_WALKREWRITE
2210 && is_gimple_reg_type (vr->type)
2211 /* ??? Handle BCOPY as well. */
2212 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2213 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2214 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2215 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2216 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2217 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2218 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2219 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2221 tree lhs, rhs;
2222 ao_ref r;
2223 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2224 vn_reference_op_s op;
2225 HOST_WIDE_INT at;
2227 /* Only handle non-variable, addressable refs. */
2228 if (ref->size != maxsize
2229 || offset % BITS_PER_UNIT != 0
2230 || ref->size % BITS_PER_UNIT != 0)
2231 return (void *)-1;
2233 /* Extract a pointer base and an offset for the destination. */
2234 lhs = gimple_call_arg (def_stmt, 0);
2235 lhs_offset = 0;
2236 if (TREE_CODE (lhs) == SSA_NAME)
2238 lhs = SSA_VAL (lhs);
2239 if (TREE_CODE (lhs) == SSA_NAME)
2241 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2242 if (gimple_assign_single_p (def_stmt)
2243 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2244 lhs = gimple_assign_rhs1 (def_stmt);
2247 if (TREE_CODE (lhs) == ADDR_EXPR)
2249 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2250 &lhs_offset);
2251 if (!tem)
2252 return (void *)-1;
2253 if (TREE_CODE (tem) == MEM_REF
2254 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2256 lhs = TREE_OPERAND (tem, 0);
2257 if (TREE_CODE (lhs) == SSA_NAME)
2258 lhs = SSA_VAL (lhs);
2259 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2261 else if (DECL_P (tem))
2262 lhs = build_fold_addr_expr (tem);
2263 else
2264 return (void *)-1;
2266 if (TREE_CODE (lhs) != SSA_NAME
2267 && TREE_CODE (lhs) != ADDR_EXPR)
2268 return (void *)-1;
2270 /* Extract a pointer base and an offset for the source. */
2271 rhs = gimple_call_arg (def_stmt, 1);
2272 rhs_offset = 0;
2273 if (TREE_CODE (rhs) == SSA_NAME)
2274 rhs = SSA_VAL (rhs);
2275 if (TREE_CODE (rhs) == ADDR_EXPR)
2277 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2278 &rhs_offset);
2279 if (!tem)
2280 return (void *)-1;
2281 if (TREE_CODE (tem) == MEM_REF
2282 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2284 rhs = TREE_OPERAND (tem, 0);
2285 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2287 else if (DECL_P (tem))
2288 rhs = build_fold_addr_expr (tem);
2289 else
2290 return (void *)-1;
2292 if (TREE_CODE (rhs) != SSA_NAME
2293 && TREE_CODE (rhs) != ADDR_EXPR)
2294 return (void *)-1;
2296 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2298 /* The bases of the destination and the reference have to agree. */
2299 if ((TREE_CODE (base) != MEM_REF
2300 && !DECL_P (base))
2301 || (TREE_CODE (base) == MEM_REF
2302 && (TREE_OPERAND (base, 0) != lhs
2303 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2304 || (DECL_P (base)
2305 && (TREE_CODE (lhs) != ADDR_EXPR
2306 || TREE_OPERAND (lhs, 0) != base)))
2307 return (void *)-1;
2309 at = offset / BITS_PER_UNIT;
2310 if (TREE_CODE (base) == MEM_REF)
2311 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2312 /* If the access is completely outside of the memcpy destination
2313 area there is no aliasing. */
2314 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2315 || lhs_offset + copy_size <= at)
2316 return NULL;
2317 /* And the access has to be contained within the memcpy destination. */
2318 if (lhs_offset > at
2319 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2320 return (void *)-1;
2322 /* Make room for 2 operands in the new reference. */
2323 if (vr->operands.length () < 2)
2325 vec<vn_reference_op_s> old = vr->operands;
2326 vr->operands.safe_grow_cleared (2);
2327 if (old == shared_lookup_references)
2328 shared_lookup_references = vr->operands;
2330 else
2331 vr->operands.truncate (2);
2333 /* The looked-through reference is a simple MEM_REF. */
2334 memset (&op, 0, sizeof (op));
2335 op.type = vr->type;
2336 op.opcode = MEM_REF;
2337 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2338 op.off = at - lhs_offset + rhs_offset;
2339 vr->operands[0] = op;
2340 op.type = TREE_TYPE (rhs);
2341 op.opcode = TREE_CODE (rhs);
2342 op.op0 = rhs;
2343 op.off = -1;
2344 vr->operands[1] = op;
2345 vr->hashcode = vn_reference_compute_hash (vr);
2347 /* Try folding the new reference to a constant. */
2348 tree val = fully_constant_vn_reference_p (vr);
2349 if (val)
2350 return vn_reference_lookup_or_insert_for_pieces
2351 (vuse, vr->set, vr->type, vr->operands, val);
2353 /* Adjust *ref from the new operands. */
2354 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2355 return (void *)-1;
2356 /* This can happen with bitfields. */
2357 if (ref->size != r.size)
2358 return (void *)-1;
2359 *ref = r;
2361 /* Do not update last seen VUSE after translating. */
2362 last_vuse_ptr = NULL;
2364 /* Keep looking for the adjusted *REF / VR pair. */
2365 return NULL;
2368 /* Bail out and stop walking. */
2369 return (void *)-1;
2372 /* Return a reference op vector from OP that can be used for
2373 vn_reference_lookup_pieces. The caller is responsible for releasing
2374 the vector. */
2376 vec<vn_reference_op_s>
2377 vn_reference_operands_for_lookup (tree op)
2379 bool valueized;
2380 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2383 /* Lookup a reference operation by its parts in the current hash table.
2384 Returns the resulting value number if it exists in the hash table,
2385 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2386 vn_reference_t stored in the hashtable if something is found. */
2388 tree
2389 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2390 vec<vn_reference_op_s> operands,
2391 vn_reference_t *vnresult, vn_lookup_kind kind)
2393 struct vn_reference_s vr1;
2394 vn_reference_t tmp;
2395 tree cst;
2397 if (!vnresult)
2398 vnresult = &tmp;
2399 *vnresult = NULL;
2401 vr1.vuse = vuse_ssa_val (vuse);
2402 shared_lookup_references.truncate (0);
2403 shared_lookup_references.safe_grow (operands.length ());
2404 memcpy (shared_lookup_references.address (),
2405 operands.address (),
2406 sizeof (vn_reference_op_s)
2407 * operands.length ());
2408 vr1.operands = operands = shared_lookup_references
2409 = valueize_refs (shared_lookup_references);
2410 vr1.type = type;
2411 vr1.set = set;
2412 vr1.hashcode = vn_reference_compute_hash (&vr1);
2413 if ((cst = fully_constant_vn_reference_p (&vr1)))
2414 return cst;
2416 vn_reference_lookup_1 (&vr1, vnresult);
2417 if (!*vnresult
2418 && kind != VN_NOWALK
2419 && vr1.vuse)
2421 ao_ref r;
2422 vn_walk_kind = kind;
2423 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2424 *vnresult =
2425 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2426 vn_reference_lookup_2,
2427 vn_reference_lookup_3,
2428 vuse_ssa_val, &vr1);
2429 gcc_checking_assert (vr1.operands == shared_lookup_references);
2432 if (*vnresult)
2433 return (*vnresult)->result;
2435 return NULL_TREE;
2438 /* Lookup OP in the current hash table, and return the resulting value
2439 number if it exists in the hash table. Return NULL_TREE if it does
2440 not exist in the hash table or if the result field of the structure
2441 was NULL. VNRESULT will be filled in with the vn_reference_t
2442 stored in the hashtable if one exists. When TBAA_P is false assume
2443 we are looking up a store and treat it as having alias-set zero. */
2445 tree
2446 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2447 vn_reference_t *vnresult, bool tbaa_p)
2449 vec<vn_reference_op_s> operands;
2450 struct vn_reference_s vr1;
2451 tree cst;
2452 bool valueized_anything;
2454 if (vnresult)
2455 *vnresult = NULL;
2457 vr1.vuse = vuse_ssa_val (vuse);
2458 vr1.operands = operands
2459 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2460 vr1.type = TREE_TYPE (op);
2461 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2462 vr1.hashcode = vn_reference_compute_hash (&vr1);
2463 if ((cst = fully_constant_vn_reference_p (&vr1)))
2464 return cst;
2466 if (kind != VN_NOWALK
2467 && vr1.vuse)
2469 vn_reference_t wvnresult;
2470 ao_ref r;
2471 /* Make sure to use a valueized reference if we valueized anything.
2472 Otherwise preserve the full reference for advanced TBAA. */
2473 if (!valueized_anything
2474 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2475 vr1.operands))
2476 ao_ref_init (&r, op);
2477 if (! tbaa_p)
2478 r.ref_alias_set = r.base_alias_set = 0;
2479 vn_walk_kind = kind;
2480 wvnresult =
2481 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2482 vn_reference_lookup_2,
2483 vn_reference_lookup_3,
2484 vuse_ssa_val, &vr1);
2485 gcc_checking_assert (vr1.operands == shared_lookup_references);
2486 if (wvnresult)
2488 if (vnresult)
2489 *vnresult = wvnresult;
2490 return wvnresult->result;
2493 return NULL_TREE;
2496 return vn_reference_lookup_1 (&vr1, vnresult);
2499 /* Lookup CALL in the current hash table and return the entry in
2500 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2502 void
2503 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2504 vn_reference_t vr)
2506 if (vnresult)
2507 *vnresult = NULL;
2509 tree vuse = gimple_vuse (call);
2511 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2512 vr->operands = valueize_shared_reference_ops_from_call (call);
2513 vr->type = gimple_expr_type (call);
2514 vr->set = 0;
2515 vr->hashcode = vn_reference_compute_hash (vr);
2516 vn_reference_lookup_1 (vr, vnresult);
2519 /* Insert OP into the current hash table with a value number of
2520 RESULT, and return the resulting reference structure we created. */
2522 static vn_reference_t
2523 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2525 vn_reference_s **slot;
2526 vn_reference_t vr1;
2527 bool tem;
2529 vr1 = current_info->references_pool->allocate ();
2530 if (TREE_CODE (result) == SSA_NAME)
2531 vr1->value_id = VN_INFO (result)->value_id;
2532 else
2533 vr1->value_id = get_or_alloc_constant_value_id (result);
2534 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2535 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2536 vr1->type = TREE_TYPE (op);
2537 vr1->set = get_alias_set (op);
2538 vr1->hashcode = vn_reference_compute_hash (vr1);
2539 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2540 vr1->result_vdef = vdef;
2542 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2543 INSERT);
2545 /* Because we look up stores using vuses, and value number failures
2546 using the vdefs (see visit_reference_op_store for how and why),
2547 it's possible that on failure we may try to insert an already
2548 inserted store. This is not wrong; there is no SSA name for a
2549 store that we could use as a differentiator anyway. Thus, unlike
2550 the other lookup functions, you cannot gcc_assert (!*slot)
2551 here. */
2553 /* But free the old slot in case of a collision. */
2554 if (*slot)
2555 free_reference (*slot);
2557 *slot = vr1;
2558 return vr1;
2561 /* Insert a reference by its pieces into the current hash table with
2562 a value number of RESULT. Return the resulting reference
2563 structure we created. */
2565 vn_reference_t
2566 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2567 vec<vn_reference_op_s> operands,
2568 tree result, unsigned int value_id)
2571 vn_reference_s **slot;
2572 vn_reference_t vr1;
2574 vr1 = current_info->references_pool->allocate ();
2575 vr1->value_id = value_id;
2576 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2577 vr1->operands = valueize_refs (operands);
2578 vr1->type = type;
2579 vr1->set = set;
2580 vr1->hashcode = vn_reference_compute_hash (vr1);
2581 if (result && TREE_CODE (result) == SSA_NAME)
2582 result = SSA_VAL (result);
2583 vr1->result = result;
2585 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2586 INSERT);
2588 /* At this point we should have all the things inserted that we have
2589 seen before, and we should never try inserting something that
2590 already exists. */
2591 gcc_assert (!*slot);
2592 if (*slot)
2593 free_reference (*slot);
2595 *slot = vr1;
2596 return vr1;
2599 /* Compute and return the hash value for nary operation VBO1. */
2601 static hashval_t
2602 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2604 inchash::hash hstate;
2605 unsigned i;
2607 for (i = 0; i < vno1->length; ++i)
2608 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2609 vno1->op[i] = SSA_VAL (vno1->op[i]);
2611 if (((vno1->length == 2
2612 && commutative_tree_code (vno1->opcode))
2613 || (vno1->length == 3
2614 && commutative_ternary_tree_code (vno1->opcode)))
2615 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2616 std::swap (vno1->op[0], vno1->op[1]);
2617 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2618 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2620 std::swap (vno1->op[0], vno1->op[1]);
2621 vno1->opcode = swap_tree_comparison (vno1->opcode);
2624 hstate.add_int (vno1->opcode);
2625 for (i = 0; i < vno1->length; ++i)
2626 inchash::add_expr (vno1->op[i], hstate);
2628 return hstate.end ();
2631 /* Compare nary operations VNO1 and VNO2 and return true if they are
2632 equivalent. */
2634 bool
2635 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2637 unsigned i;
2639 if (vno1->hashcode != vno2->hashcode)
2640 return false;
2642 if (vno1->length != vno2->length)
2643 return false;
2645 if (vno1->opcode != vno2->opcode
2646 || !types_compatible_p (vno1->type, vno2->type))
2647 return false;
2649 for (i = 0; i < vno1->length; ++i)
2650 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2651 return false;
2653 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2654 of op1. Need to check to make sure they are the same. */
2655 if (vno1->opcode == BIT_INSERT_EXPR
2656 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2657 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2658 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2659 return false;
2661 return true;
2664 /* Initialize VNO from the pieces provided. */
2666 static void
2667 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2668 enum tree_code code, tree type, tree *ops)
2670 vno->opcode = code;
2671 vno->length = length;
2672 vno->type = type;
2673 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2676 /* Initialize VNO from OP. */
2678 static void
2679 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2681 unsigned i;
2683 vno->opcode = TREE_CODE (op);
2684 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2685 vno->type = TREE_TYPE (op);
2686 for (i = 0; i < vno->length; ++i)
2687 vno->op[i] = TREE_OPERAND (op, i);
2690 /* Return the number of operands for a vn_nary ops structure from STMT. */
2692 static unsigned int
2693 vn_nary_length_from_stmt (gimple *stmt)
2695 switch (gimple_assign_rhs_code (stmt))
2697 case REALPART_EXPR:
2698 case IMAGPART_EXPR:
2699 case VIEW_CONVERT_EXPR:
2700 return 1;
2702 case BIT_FIELD_REF:
2703 return 3;
2705 case CONSTRUCTOR:
2706 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2708 default:
2709 return gimple_num_ops (stmt) - 1;
2713 /* Initialize VNO from STMT. */
2715 static void
2716 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2718 unsigned i;
2720 vno->opcode = gimple_assign_rhs_code (stmt);
2721 vno->type = gimple_expr_type (stmt);
2722 switch (vno->opcode)
2724 case REALPART_EXPR:
2725 case IMAGPART_EXPR:
2726 case VIEW_CONVERT_EXPR:
2727 vno->length = 1;
2728 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2729 break;
2731 case BIT_FIELD_REF:
2732 vno->length = 3;
2733 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2734 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2735 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2736 break;
2738 case CONSTRUCTOR:
2739 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2740 for (i = 0; i < vno->length; ++i)
2741 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2742 break;
2744 default:
2745 gcc_checking_assert (!gimple_assign_single_p (stmt));
2746 vno->length = gimple_num_ops (stmt) - 1;
2747 for (i = 0; i < vno->length; ++i)
2748 vno->op[i] = gimple_op (stmt, i + 1);
2752 /* Compute the hashcode for VNO and look for it in the hash table;
2753 return the resulting value number if it exists in the hash table.
2754 Return NULL_TREE if it does not exist in the hash table or if the
2755 result field of the operation is NULL. VNRESULT will contain the
2756 vn_nary_op_t from the hashtable if it exists. */
2758 static tree
2759 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2761 vn_nary_op_s **slot;
2763 if (vnresult)
2764 *vnresult = NULL;
2766 vno->hashcode = vn_nary_op_compute_hash (vno);
2767 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2768 NO_INSERT);
2769 if (!slot && current_info == optimistic_info)
2770 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2771 NO_INSERT);
2772 if (!slot)
2773 return NULL_TREE;
2774 if (vnresult)
2775 *vnresult = *slot;
2776 return (*slot)->result;
2779 /* Lookup an n-ary operation by its pieces and return the resulting value
2780 number if it exists in the hash table. Return NULL_TREE if it does
2781 not exist in the hash table or if the result field of the operation
2782 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2783 if it exists. */
2785 tree
2786 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2787 tree type, tree *ops, vn_nary_op_t *vnresult)
2789 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2790 sizeof_vn_nary_op (length));
2791 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2792 return vn_nary_op_lookup_1 (vno1, vnresult);
2795 /* Lookup OP in the current hash table, and return the resulting value
2796 number if it exists in the hash table. Return NULL_TREE if it does
2797 not exist in the hash table or if the result field of the operation
2798 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2799 if it exists. */
2801 tree
2802 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2804 vn_nary_op_t vno1
2805 = XALLOCAVAR (struct vn_nary_op_s,
2806 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2807 init_vn_nary_op_from_op (vno1, op);
2808 return vn_nary_op_lookup_1 (vno1, vnresult);
2811 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2812 value number if it exists in the hash table. Return NULL_TREE if
2813 it does not exist in the hash table. VNRESULT will contain the
2814 vn_nary_op_t from the hashtable if it exists. */
2816 tree
2817 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2819 vn_nary_op_t vno1
2820 = XALLOCAVAR (struct vn_nary_op_s,
2821 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2822 init_vn_nary_op_from_stmt (vno1, stmt);
2823 return vn_nary_op_lookup_1 (vno1, vnresult);
2826 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2828 static vn_nary_op_t
2829 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2831 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2834 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2835 obstack. */
2837 static vn_nary_op_t
2838 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2840 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2841 &current_info->nary_obstack);
2843 vno1->value_id = value_id;
2844 vno1->length = length;
2845 vno1->result = result;
2847 return vno1;
2850 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2851 VNO->HASHCODE first. */
2853 static vn_nary_op_t
2854 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2855 bool compute_hash)
2857 vn_nary_op_s **slot;
2859 if (compute_hash)
2860 vno->hashcode = vn_nary_op_compute_hash (vno);
2862 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2863 /* While we do not want to insert things twice it's awkward to
2864 avoid it in the case where visit_nary_op pattern-matches stuff
2865 and ends up simplifying the replacement to itself. We then
2866 get two inserts, one from visit_nary_op and one from
2867 vn_nary_build_or_lookup.
2868 So allow inserts with the same value number. */
2869 if (*slot && (*slot)->result == vno->result)
2870 return *slot;
2872 gcc_assert (!*slot);
2874 *slot = vno;
2875 return vno;
2878 /* Insert an n-ary operation into the current hash table using its
2879 pieces. Return the vn_nary_op_t structure we created and put in
2880 the hashtable. */
2882 vn_nary_op_t
2883 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2884 tree type, tree *ops,
2885 tree result, unsigned int value_id)
2887 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2888 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2889 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2892 /* Insert OP into the current hash table with a value number of
2893 RESULT. Return the vn_nary_op_t structure we created and put in
2894 the hashtable. */
2896 vn_nary_op_t
2897 vn_nary_op_insert (tree op, tree result)
2899 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2900 vn_nary_op_t vno1;
2902 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2903 init_vn_nary_op_from_op (vno1, op);
2904 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2907 /* Insert the rhs of STMT into the current hash table with a value number of
2908 RESULT. */
2910 static vn_nary_op_t
2911 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2913 vn_nary_op_t vno1
2914 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2915 result, VN_INFO (result)->value_id);
2916 init_vn_nary_op_from_stmt (vno1, stmt);
2917 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2920 /* Compute a hashcode for PHI operation VP1 and return it. */
2922 static inline hashval_t
2923 vn_phi_compute_hash (vn_phi_t vp1)
2925 inchash::hash hstate (vp1->phiargs.length () > 2
2926 ? vp1->block->index : vp1->phiargs.length ());
2927 tree phi1op;
2928 tree type;
2929 edge e;
2930 edge_iterator ei;
2932 /* If all PHI arguments are constants we need to distinguish
2933 the PHI node via its type. */
2934 type = vp1->type;
2935 hstate.merge_hash (vn_hash_type (type));
2937 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2939 /* Don't hash backedge values; they need to be handled as VN_TOP
2940 for optimistic value-numbering. */
2941 if (e->flags & EDGE_DFS_BACK)
2942 continue;
2944 phi1op = vp1->phiargs[e->dest_idx];
2945 if (phi1op == VN_TOP)
2946 continue;
2947 inchash::add_expr (phi1op, hstate);
2950 return hstate.end ();
2954 /* Return true if COND1 and COND2 represent the same condition, set
2955 *INVERTED_P if one needs to be inverted to make it the same as
2956 the other. */
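/* For example,  a < b  and  b > a  are considered the same condition
   directly, while  a < b  and  a >= b  are considered the same with
   *INVERTED_P set (modulo NaN handling for floating-point types).  */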
2958 static bool
2959 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2960 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2962 enum tree_code code1 = gimple_cond_code (cond1);
2963 enum tree_code code2 = gimple_cond_code (cond2);
2965 *inverted_p = false;
2966 if (code1 == code2)
2968 else if (code1 == swap_tree_comparison (code2))
2969 std::swap (lhs2, rhs2);
2970 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2971 *inverted_p = true;
2972 else if (code1 == invert_tree_comparison
2973 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2975 std::swap (lhs2, rhs2);
2976 *inverted_p = true;
2978 else
2979 return false;
2981 return ((expressions_equal_p (lhs1, lhs2)
2982 && expressions_equal_p (rhs1, rhs2))
2983 || (commutative_tree_code (code1)
2984 && expressions_equal_p (lhs1, rhs2)
2985 && expressions_equal_p (rhs1, lhs2)));
2988 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
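/* In the two-argument case this allows CSEing PHIs of separate
   if/else diamonds: two PHIs merging the same values under equal
   (or inverted) controlling conditions receive the same value
   number (illustrative summary of the code below).  */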
2990 static int
2991 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2993 if (vp1->hashcode != vp2->hashcode)
2994 return false;
2996 if (vp1->block != vp2->block)
2998 if (vp1->phiargs.length () != vp2->phiargs.length ())
2999 return false;
3001 switch (vp1->phiargs.length ())
3003 case 1:
3004 /* Single-arg PHIs are just copies. */
3005 break;
3007 case 2:
3009 /* Rule out backedges into the PHI. */
3010 if (vp1->block->loop_father->header == vp1->block
3011 || vp2->block->loop_father->header == vp2->block)
3012 return false;
3014 /* If the PHI nodes do not have compatible types
3015 they are not the same. */
3016 if (!types_compatible_p (vp1->type, vp2->type))
3017 return false;
3019 basic_block idom1
3020 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3021 basic_block idom2
3022 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3023 /* If the immediate dominators end in switch stmts, multiple
3024 values may end up in the same PHI arg via intermediate
3025 CFG merges. */
3026 if (EDGE_COUNT (idom1->succs) != 2
3027 || EDGE_COUNT (idom2->succs) != 2)
3028 return false;
3030 /* Verify the controlling stmt is the same. */
3031 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3032 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3033 if (! last1 || ! last2)
3034 return false;
3035 bool inverted_p;
3036 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3037 last2, vp2->cclhs, vp2->ccrhs,
3038 &inverted_p))
3039 return false;
3041 /* Get at true/false controlled edges into the PHI. */
3042 edge te1, te2, fe1, fe2;
3043 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3044 &te1, &fe1)
3045 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3046 &te2, &fe2))
3047 return false;
3049 /* Swap edges if the second condition is the inverse of the
3050 first. */
3051 if (inverted_p)
3052 std::swap (te2, fe2);
3054 /* ??? Handle VN_TOP specially. */
3055 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3056 vp2->phiargs[te2->dest_idx])
3057 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3058 vp2->phiargs[fe2->dest_idx]))
3059 return false;
3061 return true;
3064 default:
3065 return false;
3069 /* If the PHI nodes do not have compatible types
3070 they are not the same. */
3071 if (!types_compatible_p (vp1->type, vp2->type))
3072 return false;
3074 /* Any phi in the same block will have its arguments in the
3075 same edge order, because of how we store phi nodes. */
3076 int i;
3077 tree phi1op;
3078 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3080 tree phi2op = vp2->phiargs[i];
3081 if (phi1op == VN_TOP || phi2op == VN_TOP)
3082 continue;
3083 if (!expressions_equal_p (phi1op, phi2op))
3084 return false;
3087 return true;
3090 static vec<tree> shared_lookup_phiargs;
3092 /* Lookup PHI in the current hash table, and return the resulting
3093 value number if it exists in the hash table. Return NULL_TREE if
3094 it does not exist in the hash table. */
3096 static tree
3097 vn_phi_lookup (gimple *phi)
3099 vn_phi_s **slot;
3100 struct vn_phi_s vp1;
3101 edge e;
3102 edge_iterator ei;
3104 shared_lookup_phiargs.truncate (0);
3105 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3107 /* Canonicalize the SSA_NAMEs to their value number. */
3108 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3110 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3111 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3112 shared_lookup_phiargs[e->dest_idx] = def;
3114 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3115 vp1.phiargs = shared_lookup_phiargs;
3116 vp1.block = gimple_bb (phi);
3117 /* Extract values of the controlling condition. */
3118 vp1.cclhs = NULL_TREE;
3119 vp1.ccrhs = NULL_TREE;
3120 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3121 if (EDGE_COUNT (idom1->succs) == 2)
3122 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3124 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3125 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3127 vp1.hashcode = vn_phi_compute_hash (&vp1);
3128 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3129 NO_INSERT);
3130 if (!slot && current_info == optimistic_info)
3131 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3132 NO_INSERT);
3133 if (!slot)
3134 return NULL_TREE;
3135 return (*slot)->result;
3138 /* Insert PHI into the current hash table with a value number of
3139 RESULT. */
3141 static vn_phi_t
3142 vn_phi_insert (gimple *phi, tree result)
3144 vn_phi_s **slot;
3145 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3146 vec<tree> args = vNULL;
3147 edge e;
3148 edge_iterator ei;
3150 args.safe_grow (gimple_phi_num_args (phi));
3152 /* Canonicalize the SSA_NAMEs to their value number. */
3153 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3155 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3156 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3157 args[e->dest_idx] = def;
3159 vp1->value_id = VN_INFO (result)->value_id;
3160 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3161 vp1->phiargs = args;
3162 vp1->block = gimple_bb (phi);
3163 /* Extract values of the controlling condition. */
3164 vp1->cclhs = NULL_TREE;
3165 vp1->ccrhs = NULL_TREE;
3166 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3167 if (EDGE_COUNT (idom1->succs) == 2)
3168 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3170 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3171 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3173 vp1->result = result;
3174 vp1->hashcode = vn_phi_compute_hash (vp1);
3176 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3178 /* Because we iterate over phi operations more than once, it's
3179 possible the slot might already exist here, hence no assert. */
3180 *slot = vp1;
3181 return vp1;
3185 /* Print set of components in strongly connected component SCC to OUT. */
3187 static void
3188 print_scc (FILE *out, vec<tree> scc)
3190 tree var;
3191 unsigned int i;
3193 fprintf (out, "SCC consists of %u:", scc.length ());
3194 FOR_EACH_VEC_ELT (scc, i, var)
3196 fprintf (out, " ");
3197 print_generic_expr (out, var);
3199 fprintf (out, "\n");
3202 /* Return true if BB1 is dominated by BB2 taking into account edges
3203 that are not executable. */
3205 static bool
3206 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3208 edge_iterator ei;
3209 edge e;
3211 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3212 return true;
3214 /* Before iterating we'd like to know if there exists an
3215 (executable) path from bb2 to bb1 at all; if not we can
3216 directly return false. For now simply iterate once. */
3218 /* Iterate to the single executable bb1 predecessor. */
3219 if (EDGE_COUNT (bb1->preds) > 1)
3221 edge prede = NULL;
3222 FOR_EACH_EDGE (e, ei, bb1->preds)
3223 if (e->flags & EDGE_EXECUTABLE)
3225 if (prede)
3227 prede = NULL;
3228 break;
3230 prede = e;
3232 if (prede)
3234 bb1 = prede->src;
3236 /* Re-do the dominance check with changed bb1. */
3237 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3238 return true;
3242 /* Iterate to the single executable bb2 successor. */
3243 edge succe = NULL;
3244 FOR_EACH_EDGE (e, ei, bb2->succs)
3245 if (e->flags & EDGE_EXECUTABLE)
3247 if (succe)
3249 succe = NULL;
3250 break;
3252 succe = e;
3254 if (succe)
3256 /* Verify the reached block is only reached through succe.
3257 If there is only one edge we can spare us the dominator
3258 check and iterate directly. */
3259 if (EDGE_COUNT (succe->dest->preds) > 1)
3261 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3262 if (e != succe
3263 && (e->flags & EDGE_EXECUTABLE))
3265 succe = NULL;
3266 break;
3269 if (succe)
3271 bb2 = succe->dest;
3273 /* Re-do the dominance check with changed bb2. */
3274 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3275 return true;
3279 /* We could now iterate updating bb1 / bb2. */
3280 return false;
3283 /* Set the value number of FROM to TO, return true if it has changed
3284 as a result. */
3286 static inline bool
3287 set_ssa_val_to (tree from, tree to)
3289 tree currval = SSA_VAL (from);
3290 HOST_WIDE_INT toff, coff;
3292 /* The only thing we allow as value numbers are ssa_names
3293 and invariants. So assert that here. We don't allow VN_TOP
3294 as visiting a stmt should produce a value-number other than
3295 that.
3296 ??? Still VN_TOP can happen for unreachable code, so force
3297 it to varying in that case. Not all code is prepared to
3298 get VN_TOP on valueization. */
3299 if (to == VN_TOP)
3301 if (dump_file && (dump_flags & TDF_DETAILS))
3302 fprintf (dump_file, "Forcing value number to varying on "
3303 "receiving VN_TOP\n");
3304 to = from;
3307 gcc_assert (to != NULL_TREE
3308 && ((TREE_CODE (to) == SSA_NAME
3309 && (to == from || SSA_VAL (to) == to))
3310 || is_gimple_min_invariant (to)));
3312 if (from != to)
3314 if (currval == from)
3316 if (dump_file && (dump_flags & TDF_DETAILS))
3318 fprintf (dump_file, "Not changing value number of ");
3319 print_generic_expr (dump_file, from);
3320 fprintf (dump_file, " from VARYING to ");
3321 print_generic_expr (dump_file, to);
3322 fprintf (dump_file, "\n");
3324 return false;
3326 else if (currval != VN_TOP
3327 && ! is_gimple_min_invariant (currval)
3328 && is_gimple_min_invariant (to))
3330 if (dump_file && (dump_flags & TDF_DETAILS))
3332 fprintf (dump_file, "Forcing VARYING instead of changing "
3333 "value number of ");
3334 print_generic_expr (dump_file, from);
3335 fprintf (dump_file, " from ");
3336 print_generic_expr (dump_file, currval);
3337 fprintf (dump_file, " (non-constant) to ");
3338 print_generic_expr (dump_file, to);
3339 fprintf (dump_file, " (constant)\n");
3341 to = from;
3343 else if (TREE_CODE (to) == SSA_NAME
3344 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3345 to = from;
3348 if (dump_file && (dump_flags & TDF_DETAILS))
3350 fprintf (dump_file, "Setting value number of ");
3351 print_generic_expr (dump_file, from);
3352 fprintf (dump_file, " to ");
3353 print_generic_expr (dump_file, to);
3356 if (currval != to
3357 && !operand_equal_p (currval, to, 0)
3358 /* Different undefined SSA names are not actually different. See
3359 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3360 && !(TREE_CODE (currval) == SSA_NAME
3361 && TREE_CODE (to) == SSA_NAME
3362 && ssa_undefined_value_p (currval, false)
3363 && ssa_undefined_value_p (to, false))
3364 /* ??? For addresses involving volatile objects or types operand_equal_p
3365 does not reliably detect ADDR_EXPRs as equal. We know we are only
3366 getting invariant gimple addresses here, so can use
3367 get_addr_base_and_unit_offset to do this comparison. */
3368 && !(TREE_CODE (currval) == ADDR_EXPR
3369 && TREE_CODE (to) == ADDR_EXPR
3370 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3371 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3372 && coff == toff))
3374 if (dump_file && (dump_flags & TDF_DETAILS))
3375 fprintf (dump_file, " (changed)\n");
3377 /* If we equate two SSA names we have to make the side-band info
3378 of the leader conservative (and remember whatever original value
3379 was present). */
3380 if (TREE_CODE (to) == SSA_NAME)
3382 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3383 && SSA_NAME_RANGE_INFO (to))
3385 if (SSA_NAME_IS_DEFAULT_DEF (to)
3386 || dominated_by_p_w_unex
3387 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3388 gimple_bb (SSA_NAME_DEF_STMT (to))))
3389 /* Keep the info from the dominator. */
3391 else
3393 /* Save old info. */
3394 if (! VN_INFO (to)->info.range_info)
3396 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3397 VN_INFO (to)->range_info_anti_range_p
3398 = SSA_NAME_ANTI_RANGE_P (to);
3400 /* Rather than allocating memory and unioning the info
3401 just clear it. */
3402 if (dump_file && (dump_flags & TDF_DETAILS))
3404 fprintf (dump_file, "clearing range info of ");
3405 print_generic_expr (dump_file, to);
3406 fprintf (dump_file, "\n");
3408 SSA_NAME_RANGE_INFO (to) = NULL;
3411 else if (POINTER_TYPE_P (TREE_TYPE (to))
3412 && SSA_NAME_PTR_INFO (to))
3414 if (SSA_NAME_IS_DEFAULT_DEF (to)
3415 || dominated_by_p_w_unex
3416 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3417 gimple_bb (SSA_NAME_DEF_STMT (to))))
3418 /* Keep the info from the dominator. */
3420 else if (! SSA_NAME_PTR_INFO (from)
3421 /* Handle the case of trivially equivalent info. */
3422 || memcmp (SSA_NAME_PTR_INFO (to),
3423 SSA_NAME_PTR_INFO (from),
3424 sizeof (ptr_info_def)) != 0)
3426 /* Save old info. */
3427 if (! VN_INFO (to)->info.ptr_info)
3428 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3429 /* Rather than allocating memory and unioning the info
3430 just clear it. */
3431 if (dump_file && (dump_flags & TDF_DETAILS))
3433 fprintf (dump_file, "clearing points-to info of ");
3434 print_generic_expr (dump_file, to);
3435 fprintf (dump_file, "\n");
3437 SSA_NAME_PTR_INFO (to) = NULL;
3442 VN_INFO (from)->valnum = to;
3443 return true;
3445 if (dump_file && (dump_flags & TDF_DETAILS))
3446 fprintf (dump_file, "\n");
3447 return false;
3450 /* Mark as processed all the definitions in the defining stmt of USE, or
3451 the USE itself. */
3453 static void
3454 mark_use_processed (tree use)
3456 ssa_op_iter iter;
3457 def_operand_p defp;
3458 gimple *stmt = SSA_NAME_DEF_STMT (use);
3460 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3462 VN_INFO (use)->use_processed = true;
3463 return;
3466 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3468 tree def = DEF_FROM_PTR (defp);
3470 VN_INFO (def)->use_processed = true;
3474 /* Set all definitions in STMT to value number to themselves.
3475 Return true if a value number changed. */
3477 static bool
3478 defs_to_varying (gimple *stmt)
3480 bool changed = false;
3481 ssa_op_iter iter;
3482 def_operand_p defp;
3484 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3486 tree def = DEF_FROM_PTR (defp);
3487 changed |= set_ssa_val_to (def, def);
3489 return changed;
3492 /* Visit a copy between LHS and RHS, return true if the value number
3493 changed. */
3495 static bool
3496 visit_copy (tree lhs, tree rhs)
3498 /* Valueize. */
3499 rhs = SSA_VAL (rhs);
3501 return set_ssa_val_to (lhs, rhs);
3504 /* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
3505 is the same. */
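/* For instance (illustrative), for an unsigned int OP and a long
   WIDE_TYPE this returns an already available  (long) OP,  the source
   of a previous truncation  OP = (unsigned int) tem_1  with tem_1 of
   type long, or OP itself extended to long if it is an INTEGER_CST.  */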
3507 static tree
3508 valueized_wider_op (tree wide_type, tree op)
3510 if (TREE_CODE (op) == SSA_NAME)
3511 op = SSA_VAL (op);
3513 /* Either we have the op widened available. */
3514 tree ops[3] = {};
3515 ops[0] = op;
3516 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3517 wide_type, ops, NULL);
3518 if (tem)
3519 return tem;
3521 /* Or the op is truncated from some existing value. */
3522 if (TREE_CODE (op) == SSA_NAME)
3524 gimple *def = SSA_NAME_DEF_STMT (op);
3525 if (is_gimple_assign (def)
3526 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3528 tem = gimple_assign_rhs1 (def);
3529 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3531 if (TREE_CODE (tem) == SSA_NAME)
3532 tem = SSA_VAL (tem);
3533 return tem;
3538 /* For constants simply extend it. */
3539 if (TREE_CODE (op) == INTEGER_CST)
3540 return wide_int_to_tree (wide_type, op);
3542 return NULL_TREE;
3545 /* Visit a nary operator RHS, value number it, and return true if the
3546 value number of LHS has changed as a result. */
3548 static bool
3549 visit_nary_op (tree lhs, gassign *stmt)
3551 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3552 if (result)
3553 return set_ssa_val_to (lhs, result);
3555 /* Do some special pattern matching for redundancies of operations
3556 in different types. */
3557 enum tree_code code = gimple_assign_rhs_code (stmt);
3558 tree type = TREE_TYPE (lhs);
3559 tree rhs1 = gimple_assign_rhs1 (stmt);
3560 switch (code)
3562 CASE_CONVERT:
3563 /* Match arithmetic done in a different type where we can easily
3564 substitute the result from some earlier sign-changed or widened
3565 operation. */
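/* Illustrative sketch (hypothetical GIMPLE): for
     t_1 = a_2 + b_3;      <- done in unsigned short
     x_4 = (int) t_1;
   if the same addition is already available with int operands, x_4 can
   reuse that wider result, masked with 0xffff to model the
   zero-extension.  */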
3566 if (INTEGRAL_TYPE_P (type)
3567 && TREE_CODE (rhs1) == SSA_NAME
3568 /* We only handle sign-changes or zero-extension -> & mask. */
3569 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3570 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3571 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3573 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3574 if (def
3575 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3576 || gimple_assign_rhs_code (def) == MINUS_EXPR
3577 || gimple_assign_rhs_code (def) == MULT_EXPR))
3579 tree ops[3] = {};
3580 /* Either we have the op widened available. */
3581 ops[0] = valueized_wider_op (type,
3582 gimple_assign_rhs1 (def));
3583 if (ops[0])
3584 ops[1] = valueized_wider_op (type,
3585 gimple_assign_rhs2 (def));
3586 if (ops[0] && ops[1])
3588 ops[0] = vn_nary_op_lookup_pieces
3589 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3590 /* We have wider operation available. */
3591 if (ops[0])
3593 unsigned lhs_prec = TYPE_PRECISION (type);
3594 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3595 if (lhs_prec == rhs_prec)
3597 ops[1] = NULL_TREE;
3598 result = vn_nary_build_or_lookup (NOP_EXPR,
3599 type, ops);
3600 if (result)
3602 bool changed = set_ssa_val_to (lhs, result);
3603 vn_nary_op_insert_stmt (stmt, result);
3604 return changed;
3607 else
3609 ops[1] = wide_int_to_tree (type,
3610 wi::mask (rhs_prec, false,
3611 lhs_prec));
3612 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3613 TREE_TYPE (lhs),
3614 ops);
3615 if (result)
3617 bool changed = set_ssa_val_to (lhs, result);
3618 vn_nary_op_insert_stmt (stmt, result);
3619 return changed;
3626 default:;
3629 bool changed = set_ssa_val_to (lhs, lhs);
3630 vn_nary_op_insert_stmt (stmt, lhs);
3631 return changed;
3634 /* Visit a call STMT storing into LHS. Return true if the value number
3635 of the LHS has changed as a result. */
3637 static bool
3638 visit_reference_op_call (tree lhs, gcall *stmt)
3640 bool changed = false;
3641 struct vn_reference_s vr1;
3642 vn_reference_t vnresult = NULL;
3643 tree vdef = gimple_vdef (stmt);
3645 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3646 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3647 lhs = NULL_TREE;
3649 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3650 if (vnresult)
3652 if (vnresult->result_vdef && vdef)
3653 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3654 else if (vdef)
3655 /* If the call was discovered to be pure or const reflect
3656 that as far as possible. */
3657 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3659 if (!vnresult->result && lhs)
3660 vnresult->result = lhs;
3662 if (vnresult->result && lhs)
3663 changed |= set_ssa_val_to (lhs, vnresult->result);
3665 else
3667 vn_reference_t vr2;
3668 vn_reference_s **slot;
3669 tree vdef_val = vdef;
3670 if (vdef)
3672 /* If we value numbered an indirect call's function to
3673 one not clobbering memory, value number its VDEF to its
3674 VUSE. */
3675 tree fn = gimple_call_fn (stmt);
3676 if (fn && TREE_CODE (fn) == SSA_NAME)
3678 fn = SSA_VAL (fn);
3679 if (TREE_CODE (fn) == ADDR_EXPR
3680 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3681 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3682 & (ECF_CONST | ECF_PURE)))
3683 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3685 changed |= set_ssa_val_to (vdef, vdef_val);
3687 if (lhs)
3688 changed |= set_ssa_val_to (lhs, lhs);
3689 vr2 = current_info->references_pool->allocate ();
3690 vr2->vuse = vr1.vuse;
3691 /* As we are not walking the virtual operand chain we know the
3692 shared_lookup_references are still original so we can re-use
3693 them here. */
3694 vr2->operands = vr1.operands.copy ();
3695 vr2->type = vr1.type;
3696 vr2->set = vr1.set;
3697 vr2->hashcode = vr1.hashcode;
3698 vr2->result = lhs;
3699 vr2->result_vdef = vdef_val;
3700 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3701 INSERT);
3702 gcc_assert (!*slot);
3703 *slot = vr2;
3706 return changed;
3709 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3710 and return true if the value number of the LHS has changed as a result. */
3712 static bool
3713 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3715 bool changed = false;
3716 tree last_vuse;
3717 tree result;
3719 last_vuse = gimple_vuse (stmt);
3720 last_vuse_ptr = &last_vuse;
3721 result = vn_reference_lookup (op, gimple_vuse (stmt),
3722 default_vn_walk_kind, NULL, true);
3723 last_vuse_ptr = NULL;
3725 /* We handle type-punning through unions by value-numbering based
3726 on offset and size of the access. Be prepared to handle a
3727 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
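/* E.g. (illustrative): reading a float from a union member whose
   storage was last written as a same-sized int yields the stored
   value wrapped in VIEW_CONVERT_EXPR <float>.  */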
3728 if (result
3729 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3731 /* We will be setting the value number of lhs to the value number
3732 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
3733 So first simplify and lookup this expression to see if it
3734 is already available. */
3735 code_helper rcode = VIEW_CONVERT_EXPR;
3736 tree ops[3] = { result };
3737 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3740 if (result)
3741 changed = set_ssa_val_to (lhs, result);
3742 else
3744 changed = set_ssa_val_to (lhs, lhs);
3745 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3748 return changed;
3752 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3753 and return true if the value number of the LHS has changed as a result. */
3755 static bool
3756 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3758 bool changed = false;
3759 vn_reference_t vnresult = NULL;
3760 tree assign;
3761 bool resultsame = false;
3762 tree vuse = gimple_vuse (stmt);
3763 tree vdef = gimple_vdef (stmt);
3765 if (TREE_CODE (op) == SSA_NAME)
3766 op = SSA_VAL (op);
3768 /* First we want to lookup using the *vuses* from the store and see
3769 if the last store to this location with the same address
3770 had the same value.
3772 The vuses represent the memory state before the store. If the
3773 memory state, address, and value of the store are the same as the
3774 last store to this location, then this store will produce the
3775 same memory state as that store.
3777 In this case the vdef versions for this store are value numbered to those
3778 vuse versions, since they represent the same memory state after
3779 this store.
3781 Otherwise, the vdefs for the store are used when inserting into
3782 the table, since the store generates a new memory state. */
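/* Illustrative sketch (hypothetical GIMPLE, assuming no intervening
   clobber of a):
     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     a = x_1;
   The second store writes the same value to the same location, so
   .MEM_5 is value-numbered to the value of .MEM_3 instead of
   representing a new memory state.  */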
3784 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3785 if (vnresult
3786 && vnresult->result)
3788 tree result = vnresult->result;
3789 if (TREE_CODE (result) == SSA_NAME)
3790 result = SSA_VAL (result);
3791 resultsame = expressions_equal_p (result, op);
3792 if (resultsame)
3794 /* If the TBAA state isn't compatible for downstream reads
3795 we cannot value-number the VDEFs the same. */
3796 alias_set_type set = get_alias_set (lhs);
3797 if (vnresult->set != set
3798 && ! alias_set_subset_of (set, vnresult->set))
3799 resultsame = false;
3803 if (!resultsame)
3805 /* Only perform the following when being called from PRE
3806 which embeds tail merging. */
3807 if (default_vn_walk_kind == VN_WALK)
3809 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3810 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3811 if (vnresult)
3813 VN_INFO (vdef)->use_processed = true;
3814 return set_ssa_val_to (vdef, vnresult->result_vdef);
3818 if (dump_file && (dump_flags & TDF_DETAILS))
3820 fprintf (dump_file, "No store match\n");
3821 fprintf (dump_file, "Value numbering store ");
3822 print_generic_expr (dump_file, lhs);
3823 fprintf (dump_file, " to ");
3824 print_generic_expr (dump_file, op);
3825 fprintf (dump_file, "\n");
3827 /* Have to set value numbers before insert, since insert is
3828 going to valueize the references in-place. */
3829 if (vdef)
3830 changed |= set_ssa_val_to (vdef, vdef);
3832 /* Do not insert structure copies into the tables. */
3833 if (is_gimple_min_invariant (op)
3834 || is_gimple_reg (op))
3835 vn_reference_insert (lhs, op, vdef, NULL);
3837 /* Only perform the following when being called from PRE
3838 which embeds tail merging. */
3839 if (default_vn_walk_kind == VN_WALK)
3841 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3842 vn_reference_insert (assign, lhs, vuse, vdef);
3845 else
3847 /* We had a match, so value number the vdef to have the value
3848 number of the vuse it came from. */
3850 if (dump_file && (dump_flags & TDF_DETAILS))
3851 fprintf (dump_file, "Store matched earlier value, "
3852 "value numbering store vdefs to matching vuses.\n");
3854 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3857 return changed;
3860 /* Visit and value number PHI, return true if the value number
3861 changed. */
3863 static bool
3864 visit_phi (gimple *phi)
3866 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3867 unsigned n_executable = 0;
3868 bool allsame = true;
3869 edge_iterator ei;
3870 edge e;
3872 /* TODO: We could check for this in init_sccvn, and replace this
3873 with a gcc_assert. */
3874 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3875 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3877 /* See if all non-TOP arguments have the same value. TOP is
3878 equivalent to everything, so we can ignore it. */
3879 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3880 if (e->flags & EDGE_EXECUTABLE)
3882 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3884 ++n_executable;
3885 if (TREE_CODE (def) == SSA_NAME)
3886 def = SSA_VAL (def);
3887 if (def == VN_TOP)
3889 /* Ignore undefined defs for sameval but record one. */
3890 else if (TREE_CODE (def) == SSA_NAME
3891 && ssa_undefined_value_p (def, false))
3892 seen_undef = def;
3893 else if (sameval == VN_TOP)
3894 sameval = def;
3895 else if (!expressions_equal_p (def, sameval))
3897 allsame = false;
3898 break;
3903 /* If none of the edges was executable keep the value-number at VN_TOP;
3904 if only a single edge is executable use its value. */
3905 if (n_executable <= 1)
3906 result = seen_undef ? seen_undef : sameval;
3907 /* If we saw only undefined values and VN_TOP use one of the
3908 undefined values. */
3909 else if (sameval == VN_TOP)
3910 result = seen_undef ? seen_undef : sameval;
3911 /* First see if it is equivalent to a phi node in this block. We prefer
3912 this as it allows IV elimination - see PRs 66502 and 67167. */
3913 else if ((result = vn_phi_lookup (phi)))
3915 /* If all values are the same use that, unless we've seen undefined
3916 values as well and the value isn't constant.
3917 CCP/copyprop have the same restriction to not remove uninit warnings. */
3918 else if (allsame
3919 && (! seen_undef || is_gimple_min_invariant (sameval)))
3920 result = sameval;
3921 else
3923 result = PHI_RESULT (phi);
3924 /* Only insert PHIs that are varying; for constant value numbers
3925 we would mess up equivalences otherwise, as we are only comparing
3926 the immediate controlling predicates. */
3927 vn_phi_insert (phi, result);
3930 return set_ssa_val_to (PHI_RESULT (phi), result);
3933 /* Try to simplify RHS using equivalences and constant folding. */
3935 static tree
3936 try_to_simplify (gassign *stmt)
3938 enum tree_code code = gimple_assign_rhs_code (stmt);
3939 tree tem;
3941 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3942 in this case, there is no point in doing extra work. */
3943 if (code == SSA_NAME)
3944 return NULL_TREE;
3946 /* First try constant folding based on our current lattice. */
3947 mprts_hook = vn_lookup_simplify_result;
3948 mprts_hook_cnt = 9;
3949 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3950 mprts_hook = NULL;
3951 if (tem
3952 && (TREE_CODE (tem) == SSA_NAME
3953 || is_gimple_min_invariant (tem)))
3954 return tem;
3956 return NULL_TREE;
3959 /* Visit and value number USE, return true if the value number
3960 changed. */
3962 static bool
3963 visit_use (tree use)
3965 bool changed = false;
3966 gimple *stmt = SSA_NAME_DEF_STMT (use);
3968 mark_use_processed (use);
3970 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
3971 && !SSA_NAME_IS_DEFAULT_DEF (use));
3973 if (dump_file && (dump_flags & TDF_DETAILS))
3975 fprintf (dump_file, "Value numbering ");
3976 print_generic_expr (dump_file, use);
3977 fprintf (dump_file, " stmt = ");
3978 print_gimple_stmt (dump_file, stmt, 0);
3981 if (gimple_code (stmt) == GIMPLE_PHI)
3982 changed = visit_phi (stmt);
3983 else if (gimple_has_volatile_ops (stmt))
3984 changed = defs_to_varying (stmt);
3985 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3987 enum tree_code code = gimple_assign_rhs_code (ass);
3988 tree lhs = gimple_assign_lhs (ass);
3989 tree rhs1 = gimple_assign_rhs1 (ass);
3990 tree simplified;
3992 /* Shortcut for copies. Simplifying copies is pointless,
3993 since we copy the expression and value they represent. */
3994 if (code == SSA_NAME
3995 && TREE_CODE (lhs) == SSA_NAME)
3997 changed = visit_copy (lhs, rhs1);
3998 goto done;
4000 simplified = try_to_simplify (ass);
4001 if (simplified)
4003 if (dump_file && (dump_flags & TDF_DETAILS))
4005 fprintf (dump_file, "RHS ");
4006 print_gimple_expr (dump_file, ass, 0);
4007 fprintf (dump_file, " simplified to ");
4008 print_generic_expr (dump_file, simplified);
4009 fprintf (dump_file, "\n");
4012 /* Setting value numbers to constants will occasionally
4013 screw up phi congruence because constants are not
4014 uniquely associated with a single ssa name that can be
4015 looked up. */
4016 if (simplified
4017 && is_gimple_min_invariant (simplified)
4018 && TREE_CODE (lhs) == SSA_NAME)
4020 changed = set_ssa_val_to (lhs, simplified);
4021 goto done;
4023 else if (simplified
4024 && TREE_CODE (simplified) == SSA_NAME
4025 && TREE_CODE (lhs) == SSA_NAME)
4027 changed = visit_copy (lhs, simplified);
4028 goto done;
4031 if ((TREE_CODE (lhs) == SSA_NAME
4032 /* We can substitute SSA_NAMEs that are live over
4033 abnormal edges with their constant value. */
4034 && !(gimple_assign_copy_p (ass)
4035 && is_gimple_min_invariant (rhs1))
4036 && !(simplified
4037 && is_gimple_min_invariant (simplified))
4038 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4039 /* Stores or copies from SSA_NAMEs that are live over
4040 abnormal edges are a problem. */
4041 || (code == SSA_NAME
4042 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4043 changed = defs_to_varying (ass);
4044 else if (REFERENCE_CLASS_P (lhs)
4045 || DECL_P (lhs))
4046 changed = visit_reference_op_store (lhs, rhs1, ass);
4047 else if (TREE_CODE (lhs) == SSA_NAME)
4049 if ((gimple_assign_copy_p (ass)
4050 && is_gimple_min_invariant (rhs1))
4051 || (simplified
4052 && is_gimple_min_invariant (simplified)))
4054 if (simplified)
4055 changed = set_ssa_val_to (lhs, simplified);
4056 else
4057 changed = set_ssa_val_to (lhs, rhs1);
4059 else
4061 /* Visit the original statement. */
4062 switch (vn_get_stmt_kind (ass))
4064 case VN_NARY:
4065 changed = visit_nary_op (lhs, ass);
4066 break;
4067 case VN_REFERENCE:
4068 changed = visit_reference_op_load (lhs, rhs1, ass);
4069 break;
4070 default:
4071 changed = defs_to_varying (ass);
4072 break;
4076 else
4077 changed = defs_to_varying (ass);
4079 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4081 tree lhs = gimple_call_lhs (call_stmt);
4082 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4084 /* Try constant folding based on our current lattice. */
4085 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4086 vn_valueize);
4087 if (simplified)
4089 if (dump_file && (dump_flags & TDF_DETAILS))
4091 fprintf (dump_file, "call ");
4092 print_gimple_expr (dump_file, call_stmt, 0);
4093 fprintf (dump_file, " simplified to ");
4094 print_generic_expr (dump_file, simplified);
4095 fprintf (dump_file, "\n");
4098 /* Setting value numbers to constants will occasionally
4099 screw up phi congruence because constants are not
4100 uniquely associated with a single ssa name that can be
4101 looked up. */
4102 if (simplified
4103 && is_gimple_min_invariant (simplified))
4105 changed = set_ssa_val_to (lhs, simplified);
4106 if (gimple_vdef (call_stmt))
4107 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4108 SSA_VAL (gimple_vuse (call_stmt)));
4109 goto done;
4111 else if (simplified
4112 && TREE_CODE (simplified) == SSA_NAME)
4114 changed = visit_copy (lhs, simplified);
4115 if (gimple_vdef (call_stmt))
4116 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4117 SSA_VAL (gimple_vuse (call_stmt)));
4118 goto done;
4120 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4122 changed = defs_to_varying (call_stmt);
4123 goto done;
4127 /* Pick up flags from a devirtualization target. */
4128 tree fn = gimple_call_fn (stmt);
4129 int extra_fnflags = 0;
4130 if (fn && TREE_CODE (fn) == SSA_NAME)
4132 fn = SSA_VAL (fn);
4133 if (TREE_CODE (fn) == ADDR_EXPR
4134 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4135 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4137 if (!gimple_call_internal_p (call_stmt)
4138 && (/* Calls to the same function with the same vuse
4139 and the same operands do not necessarily return the same
4140 value, unless they're pure or const. */
4141 ((gimple_call_flags (call_stmt) | extra_fnflags)
4142 & (ECF_PURE | ECF_CONST))
4143 /* If calls have a vdef, subsequent calls won't have
4144 the same incoming vuse. So, if 2 calls with vdef have the
4145 same vuse, we know they're not subsequent.
4146 Two such calls to the same function with the same vuse and
4147 the same operands, which therefore are not subsequent, can be
4148 value numbered the same, because there is no code in the
4149 program that can compare the 2 values...
4150 || (gimple_vdef (call_stmt)
4151 /* ... unless the call returns a pointer which does
4152 not alias with anything else, in which case the
4153 information that the values are distinct is encoded
4154 in the IL. */
4155 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4156 /* Only perform the following when being called from PRE
4157 which embeds tail merging. */
4158 && default_vn_walk_kind == VN_WALK)))
4159 changed = visit_reference_op_call (lhs, call_stmt);
4160 else
4161 changed = defs_to_varying (call_stmt);
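	/* As a sketch of the pure/const case above, with f pure and
	   made-up SSA names,

	       # VUSE <.MEM_2>
	       x_3 = f (a_1);
	       ...
	       # VUSE <.MEM_2>
	       x_5 = f (a_1);

	   both calls see the same memory state and operands, so the
	   reference lookup gives x_5 the value number of x_3.  */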
4163 else
4164 changed = defs_to_varying (stmt);
4165 done:
4166 return changed;
4169 /* Compare two operands by reverse postorder index. */
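/* Default defs (whose defining statement is a GIMPLE_NOP) sort first,
   ordered by SSA version; otherwise blocks are ordered by their RPO
   number, PHIs come before other statements in the same block, and
   remaining ties are broken by statement uid or SSA version.  */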
4171 static int
4172 compare_ops (const void *pa, const void *pb)
4174 const tree opa = *((const tree *)pa);
4175 const tree opb = *((const tree *)pb);
4176 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4177 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4178 basic_block bba;
4179 basic_block bbb;
4181 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4182 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4183 else if (gimple_nop_p (opstmta))
4184 return -1;
4185 else if (gimple_nop_p (opstmtb))
4186 return 1;
4188 bba = gimple_bb (opstmta);
4189 bbb = gimple_bb (opstmtb);
4191 if (!bba && !bbb)
4192 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4193 else if (!bba)
4194 return -1;
4195 else if (!bbb)
4196 return 1;
4198 if (bba == bbb)
4200 if (gimple_code (opstmta) == GIMPLE_PHI
4201 && gimple_code (opstmtb) == GIMPLE_PHI)
4202 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4203 else if (gimple_code (opstmta) == GIMPLE_PHI)
4204 return -1;
4205 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4206 return 1;
4207 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4208 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4209 else
4210 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4212 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4215 /* Sort an array containing members of a strongly connected component
4216 SCC so that the members are ordered by RPO number.
4217 This means that when the sort is complete, iterating through the
4218 array will give you the members in RPO order. */
4220 static void
4221 sort_scc (vec<tree> scc)
4223 scc.qsort (compare_ops);
4226 /* Insert the no longer used nary ONARY to the hash INFO. */
4228 static void
4229 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4231 size_t size = sizeof_vn_nary_op (onary->length);
4232 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4233 &info->nary_obstack);
4234 memcpy (nary, onary, size);
4235 vn_nary_op_insert_into (nary, info->nary, false);
4238 /* Insert the no longer used phi OPHI to the hash INFO. */
4240 static void
4241 copy_phi (vn_phi_t ophi, vn_tables_t info)
4243 vn_phi_t phi = info->phis_pool->allocate ();
4244 vn_phi_s **slot;
4245 memcpy (phi, ophi, sizeof (*phi));
4246 ophi->phiargs.create (0);
4247 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4248 gcc_assert (!*slot);
4249 *slot = phi;
4252 /* Insert the no longer used reference OREF to the hash INFO. */
4254 static void
4255 copy_reference (vn_reference_t oref, vn_tables_t info)
4257 vn_reference_t ref;
4258 vn_reference_s **slot;
4259 ref = info->references_pool->allocate ();
4260 memcpy (ref, oref, sizeof (*ref));
4261 oref->operands.create (0);
4262 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4263 if (*slot)
4264 free_reference (*slot);
4265 *slot = ref;
4268 /* Process a strongly connected component in the SSA graph. */
4270 static void
4271 process_scc (vec<tree> scc)
4273 tree var;
4274 unsigned int i;
4275 unsigned int iterations = 0;
4276 bool changed = true;
4277 vn_nary_op_iterator_type hin;
4278 vn_phi_iterator_type hip;
4279 vn_reference_iterator_type hir;
4280 vn_nary_op_t nary;
4281 vn_phi_t phi;
4282 vn_reference_t ref;
4284 /* If the SCC has a single member, just visit it. */
4285 if (scc.length () == 1)
4287 tree use = scc[0];
4288 if (VN_INFO (use)->use_processed)
4289 return;
4290 /* We need to make sure it doesn't form a cycle itself, which can
4291 happen for self-referential PHI nodes. In that case we would
4292 end up inserting an expression with VN_TOP operands into the
4293 valid table which makes us derive bogus equivalences later.
4294 The cheapest way to check this is to assume it for all PHI nodes. */
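   /* For example, a degenerate PHI such as

	  i_1 = PHI <0(2), i_1(4)>

      is a single-member SCC that still uses its own result, so visiting
      it only once could record an expression with a VN_TOP operand;
      letting all PHIs go through the iteration below avoids that.  */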
4295 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4296 /* Fallthru to iteration. */ ;
4297 else
4299 visit_use (use);
4300 return;
4304 if (dump_file && (dump_flags & TDF_DETAILS))
4305 print_scc (dump_file, scc);
4307 /* Iterate over the SCC with the optimistic table until it stops
4308 changing. */
4309 current_info = optimistic_info;
4310 while (changed)
4312 changed = false;
4313 iterations++;
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4315 fprintf (dump_file, "Starting iteration %d\n", iterations);
4316 /* As we are value-numbering optimistically we have to
4317 clear the expression tables and the simplified expressions
4318 in each iteration until we converge. */
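	 /* Expressions recorded under an optimistic assumption that later
	    turns out to be wrong must not survive into the next pass over
	    the SCC, so the hash tables and their backing obstack and
	    allocation pools are reset here.  */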
4319 optimistic_info->nary->empty ();
4320 optimistic_info->phis->empty ();
4321 optimistic_info->references->empty ();
4322 obstack_free (&optimistic_info->nary_obstack, NULL);
4323 gcc_obstack_init (&optimistic_info->nary_obstack);
4324 optimistic_info->phis_pool->release ();
4325 optimistic_info->references_pool->release ();
4326 FOR_EACH_VEC_ELT (scc, i, var)
4327 gcc_assert (!VN_INFO (var)->needs_insertion
4328 && VN_INFO (var)->expr == NULL);
4329 FOR_EACH_VEC_ELT (scc, i, var)
4330 changed |= visit_use (var);
4333 if (dump_file && (dump_flags & TDF_DETAILS))
4334 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4335 statistics_histogram_event (cfun, "SCC iterations", iterations);
4337 /* Finally, copy the contents of the no longer used optimistic
4338 table to the valid table. */
4339 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4340 copy_nary (nary, valid_info);
4341 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4342 copy_phi (phi, valid_info);
4343 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4344 ref, vn_reference_t, hir)
4345 copy_reference (ref, valid_info);
4347 current_info = valid_info;
4351 /* Pop the components of the found SCC for NAME off the SCC stack
4352 and process them. If the SCC exceeds PARAM_SCCVN_MAX_SCC_SIZE
4353 its members are dropped to varying instead. */
4355 static void
4356 extract_and_process_scc_for_name (tree name)
4358 auto_vec<tree> scc;
4359 tree x;
4361 /* Found an SCC, pop the components off the SCC stack and
4362 process them. */
4365 x = sccstack.pop ();
4367 VN_INFO (x)->on_sccstack = false;
4368 scc.safe_push (x);
4369 } while (x != name);
4371 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4372 larger than PARAM_SCCVN_MAX_SCC_SIZE.
4373 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4374 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4376 if (dump_file)
4378 print_scc (dump_file, scc);
4379 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4380 "size %u exceeding %u\n", scc.length (),
4381 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4383 tree var;
4384 unsigned i;
4385 FOR_EACH_VEC_ELT (scc, i, var)
4387 gimple *def = SSA_NAME_DEF_STMT (var);
4388 mark_use_processed (var);
4389 if (SSA_NAME_IS_DEFAULT_DEF (var)
4390 || gimple_code (def) == GIMPLE_PHI)
4391 set_ssa_val_to (var, var);
4392 else
4393 defs_to_varying (def);
4395 return;
4398 if (scc.length () > 1)
4399 sort_scc (scc);
4401 process_scc (scc);
4404 /* Depth first search on NAME to discover and process SCC's in the SSA
4405 graph.
4406 Execution of this algorithm relies on the fact that the SCC's are
4407 popped off the stack in topological order. */
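   /* For example, given

	  x_1 = PHI <0(2), x_3(4)>
	  x_3 = x_1 + 1;

      the walk from x_1 visits x_3, sees x_1 already on the SCC stack,
      and when it returns to x_1 with low equal to dfsnum the pair
      {x_1, x_3} is popped and processed as a single SCC.  */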
4411 static void
4412 DFS (tree name)
4414 auto_vec<ssa_op_iter> itervec;
4415 auto_vec<tree> namevec;
4416 use_operand_p usep = NULL;
4417 gimple *defstmt;
4418 tree use;
4419 ssa_op_iter iter;
4421 start_over:
4422 /* SCC info */
4423 VN_INFO (name)->dfsnum = next_dfs_num++;
4424 VN_INFO (name)->visited = true;
4425 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4427 sccstack.safe_push (name);
4428 VN_INFO (name)->on_sccstack = true;
4429 defstmt = SSA_NAME_DEF_STMT (name);
4431 /* Recursively DFS on our operands, looking for SCC's. */
4432 if (!gimple_nop_p (defstmt))
4434 /* Push a new iterator. */
4435 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4436 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4437 else
4438 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4440 else
4441 clear_and_done_ssa_iter (&iter);
4443 while (1)
4445 /* If we are done processing uses of a name, go up the stack
4446 of iterators and process SCCs as we found them. */
4447 if (op_iter_done (&iter))
4449 /* See if we found an SCC. */
4450 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4451 extract_and_process_scc_for_name (name);
4453 /* Check if we are done. */
4454 if (namevec.is_empty ())
4455 return;
4457 /* Restore the last use walker and continue walking there. */
4458 use = name;
4459 name = namevec.pop ();
4460 memcpy (&iter, &itervec.last (),
4461 sizeof (ssa_op_iter));
4462 itervec.pop ();
4463 goto continue_walking;
4466 use = USE_FROM_PTR (usep);
4468 /* Since we handle phi nodes, we will sometimes get
4469 invariants in the use expression. */
4470 if (TREE_CODE (use) == SSA_NAME)
4472 if (! (VN_INFO (use)->visited))
4474 /* Recurse by pushing the current use walking state on
4475 the stack and starting over. */
4476 itervec.safe_push (iter);
4477 namevec.safe_push (name);
4478 name = use;
4479 goto start_over;
4481 continue_walking:
4482 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4483 VN_INFO (use)->low);
4485 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4486 && VN_INFO (use)->on_sccstack)
4488 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4489 VN_INFO (name)->low);
4493 usep = op_iter_next_use (&iter);
4497 /* Allocate a value number table. */
4499 static void
4500 allocate_vn_table (vn_tables_t table)
4502 table->phis = new vn_phi_table_type (23);
4503 table->nary = new vn_nary_op_table_type (23);
4504 table->references = new vn_reference_table_type (23);
4506 gcc_obstack_init (&table->nary_obstack);
4507 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4508 table->references_pool = new object_allocator<vn_reference_s>
4509 ("VN references");
4512 /* Free a value number table. */
4514 static void
4515 free_vn_table (vn_tables_t table)
4517 delete table->phis;
4518 table->phis = NULL;
4519 delete table->nary;
4520 table->nary = NULL;
4521 delete table->references;
4522 table->references = NULL;
4523 obstack_free (&table->nary_obstack, NULL);
4524 delete table->phis_pool;
4525 delete table->references_pool;
4528 static void
4529 init_scc_vn (void)
4531 int j;
4532 int *rpo_numbers_temp;
4534 calculate_dominance_info (CDI_DOMINATORS);
4535 mark_dfs_back_edges ();
4537 sccstack.create (0);
4538 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4540 constant_value_ids = BITMAP_ALLOC (NULL);
4542 next_dfs_num = 1;
4543 next_value_id = 1;
4545 vn_ssa_aux_table.create (num_ssa_names + 1);
4546 /* The create call doesn't actually grow the vector to the right size,
4547 it just preallocates the space; safe_grow_cleared does the growing. */
4548 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4549 gcc_obstack_init (&vn_ssa_aux_obstack);
4551 shared_lookup_phiargs.create (0);
4552 shared_lookup_references.create (0);
4553 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4554 rpo_numbers_temp =
4555 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4556 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4558 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4559 means that the i'th block in RPO order is bb. We want to map bb's
4560 to RPO numbers, so we need to invert this array. */
4561 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4562 rpo_numbers[rpo_numbers_temp[j]] = j;
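   /* For example, if rpo_numbers_temp is {2, 4, 3} then block 2 gets
      RPO number 0, block 4 gets 1 and block 3 gets 2 in rpo_numbers.  */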
4564 XDELETE (rpo_numbers_temp);
4566 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4567 get_identifier ("VN_TOP"), error_mark_node);
4569 renumber_gimple_stmt_uids ();
4571 /* Create the valid and optimistic value numbering tables. */
4572 valid_info = XCNEW (struct vn_tables_s);
4573 allocate_vn_table (valid_info);
4574 optimistic_info = XCNEW (struct vn_tables_s);
4575 allocate_vn_table (optimistic_info);
4576 current_info = valid_info;
4578 /* Create the VN_INFO structures, and initialize value numbers to
4579 TOP or VARYING for parameters. */
4580 size_t i;
4581 tree name;
4583 FOR_EACH_SSA_NAME (i, name, cfun)
4585 VN_INFO_GET (name)->valnum = VN_TOP;
4586 VN_INFO (name)->needs_insertion = false;
4587 VN_INFO (name)->expr = NULL;
4588 VN_INFO (name)->value_id = 0;
4590 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4591 continue;
4593 switch (TREE_CODE (SSA_NAME_VAR (name)))
4595 case VAR_DECL:
4596 /* All undefined vars are VARYING. */
4597 VN_INFO (name)->valnum = name;
4598 VN_INFO (name)->visited = true;
4599 break;
4601 case PARM_DECL:
4602 /* Parameters are VARYING but we can record a condition
4603 if we know it is a non-NULL pointer. */
4604 VN_INFO (name)->visited = true;
4605 VN_INFO (name)->valnum = name;
4606 if (POINTER_TYPE_P (TREE_TYPE (name))
4607 && nonnull_arg_p (SSA_NAME_VAR (name)))
4609 tree ops[2];
4610 ops[0] = name;
4611 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4612 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4613 boolean_true_node, 0);
4614 if (dump_file && (dump_flags & TDF_DETAILS))
4616 fprintf (dump_file, "Recording ");
4617 print_generic_expr (dump_file, name, TDF_SLIM);
4618 fprintf (dump_file, " != 0\n");
4621 break;
4623 case RESULT_DECL:
4624 /* If the result is passed by invisible reference the default
4625 def is initialized, otherwise it's uninitialized. Either way
4626 it is treated as varying here. */
4627 VN_INFO (name)->visited = true;
4628 VN_INFO (name)->valnum = name;
4629 break;
4631 default:
4632 gcc_unreachable ();
4637 /* Restore SSA info that has been reset on value leaders. */
4639 void
4640 scc_vn_restore_ssa_info (void)
4642 unsigned i;
4643 tree name;
4645 FOR_EACH_SSA_NAME (i, name, cfun)
4647 if (has_VN_INFO (name))
4649 if (VN_INFO (name)->needs_insertion)
4651 else if (POINTER_TYPE_P (TREE_TYPE (name))
4652 && VN_INFO (name)->info.ptr_info)
4653 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4654 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4655 && VN_INFO (name)->info.range_info)
4657 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4658 SSA_NAME_ANTI_RANGE_P (name)
4659 = VN_INFO (name)->range_info_anti_range_p;
4665 void
4666 free_scc_vn (void)
4668 size_t i;
4669 tree name;
4671 delete constant_to_value_id;
4672 constant_to_value_id = NULL;
4673 BITMAP_FREE (constant_value_ids);
4674 shared_lookup_phiargs.release ();
4675 shared_lookup_references.release ();
4676 XDELETEVEC (rpo_numbers);
4678 FOR_EACH_SSA_NAME (i, name, cfun)
4680 if (has_VN_INFO (name)
4681 && VN_INFO (name)->needs_insertion)
4682 release_ssa_name (name);
4684 obstack_free (&vn_ssa_aux_obstack, NULL);
4685 vn_ssa_aux_table.release ();
4687 sccstack.release ();
4688 free_vn_table (valid_info);
4689 XDELETE (valid_info);
4690 free_vn_table (optimistic_info);
4691 XDELETE (optimistic_info);
4693 BITMAP_FREE (const_parms);
4696 /* Set *ID according to RESULT. */
4698 static void
4699 set_value_id_for_result (tree result, unsigned int *id)
4701 if (result && TREE_CODE (result) == SSA_NAME)
4702 *id = VN_INFO (result)->value_id;
4703 else if (result && is_gimple_min_invariant (result))
4704 *id = get_or_alloc_constant_value_id (result);
4705 else
4706 *id = get_next_value_id ();
4709 /* Set the value ids in the valid hash tables. */
4711 static void
4712 set_hashtable_value_ids (void)
4714 vn_nary_op_iterator_type hin;
4715 vn_phi_iterator_type hip;
4716 vn_reference_iterator_type hir;
4717 vn_nary_op_t vno;
4718 vn_reference_t vr;
4719 vn_phi_t vp;
4721 /* Now set the value ids of the things we had put in the hash
4722 table. */
4724 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4725 set_value_id_for_result (vno->result, &vno->value_id);
4727 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4728 set_value_id_for_result (vp->result, &vp->value_id);
4730 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4731 hir)
4732 set_value_id_for_result (vr->result, &vr->value_id);
4735 class sccvn_dom_walker : public dom_walker
4737 public:
4738 sccvn_dom_walker ()
4739 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4741 virtual edge before_dom_children (basic_block);
4742 virtual void after_dom_children (basic_block);
4744 void record_cond (basic_block,
4745 enum tree_code code, tree lhs, tree rhs, bool value);
4746 void record_conds (basic_block,
4747 enum tree_code code, tree lhs, tree rhs, bool value);
4749 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4750 cond_stack;
4753 /* Record a temporary condition for the BB and its dominated blocks. */
4755 void
4756 sccvn_dom_walker::record_cond (basic_block bb,
4757 enum tree_code code, tree lhs, tree rhs,
4758 bool value)
4760 tree ops[2] = { lhs, rhs };
4761 vn_nary_op_t old = NULL;
4762 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4763 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4764 vn_nary_op_t cond
4765 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4766 value
4767 ? boolean_true_node
4768 : boolean_false_node, 0);
4769 if (dump_file && (dump_flags & TDF_DETAILS))
4771 fprintf (dump_file, "Recording temporarily ");
4772 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4773 fprintf (dump_file, " %s ", get_tree_code_name (code));
4774 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4775 fprintf (dump_file, " == %s%s\n",
4776 value ? "true" : "false",
4777 old ? " (old entry saved)" : "");
4779 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4782 /* Record temporary conditions for the BB and its dominated blocks
4783 according to LHS CODE RHS == VALUE and the conditions implied by it. */
4785 void
4786 sccvn_dom_walker::record_conds (basic_block bb,
4787 enum tree_code code, tree lhs, tree rhs,
4788 bool value)
4790 /* Record the original condition. */
4791 record_cond (bb, code, lhs, rhs, value);
4793 if (!value)
4794 return;
4796 /* Record dominated conditions if the condition is true. Note that
4797 the inversion is already recorded. */
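   /* For instance, from a_1 < b_2 being true we also record
      a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false,
      matching the LT_EXPR case below.  */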
4798 switch (code)
4800 case LT_EXPR:
4801 case GT_EXPR:
4802 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4803 record_cond (bb, NE_EXPR, lhs, rhs, true);
4804 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4805 break;
4807 case EQ_EXPR:
4808 record_cond (bb, LE_EXPR, lhs, rhs, true);
4809 record_cond (bb, GE_EXPR, lhs, rhs, true);
4810 record_cond (bb, LT_EXPR, lhs, rhs, false);
4811 record_cond (bb, GT_EXPR, lhs, rhs, false);
4812 break;
4814 default:
4815 break;
4819 /* Restore expressions and values derived from conditionals. */
4821 void
4822 sccvn_dom_walker::after_dom_children (basic_block bb)
4824 while (!cond_stack.is_empty ()
4825 && cond_stack.last ().first == bb)
4827 vn_nary_op_t cond = cond_stack.last ().second.first;
4828 vn_nary_op_t old = cond_stack.last ().second.second;
4829 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4830 if (old)
4831 vn_nary_op_insert_into (old, current_info->nary, false);
4832 cond_stack.pop ();
4836 /* Value number all statements in BB. */
4838 edge
4839 sccvn_dom_walker::before_dom_children (basic_block bb)
4841 edge e;
4842 edge_iterator ei;
4844 if (dump_file && (dump_flags & TDF_DETAILS))
4845 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4847 /* If we have a single predecessor record the equivalence from a
4848 possible condition on the predecessor edge. */
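   /* For instance, if the single predecessor ends in

	  if (a_1 > 0)

      and this block is entered on the true edge, a_1 > 0 == true and the
      conditions implied by it are temporarily recorded for this block
      and its dominated blocks.  */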
4849 edge pred_e = NULL;
4850 FOR_EACH_EDGE (e, ei, bb->preds)
4852 /* Ignore simple backedges into this block so that conditions can
4853 still be recorded in loop headers. */
4854 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4855 continue;
4856 if (! pred_e)
4857 pred_e = e;
4858 else
4860 pred_e = NULL;
4861 break;
4864 if (pred_e)
4866 /* Check if there are multiple executable successor edges in
4867 the source block. Otherwise there is no additional info
4868 to be recorded. */
4869 edge e2;
4870 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4871 if (e2 != pred_e
4872 && e2->flags & EDGE_EXECUTABLE)
4873 break;
4874 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4876 gimple *stmt = last_stmt (pred_e->src);
4877 if (stmt
4878 && gimple_code (stmt) == GIMPLE_COND)
4880 enum tree_code code = gimple_cond_code (stmt);
4881 tree lhs = gimple_cond_lhs (stmt);
4882 tree rhs = gimple_cond_rhs (stmt);
4883 record_conds (bb, code, lhs, rhs,
4884 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4885 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4886 if (code != ERROR_MARK)
4887 record_conds (bb, code, lhs, rhs,
4888 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4893 /* Value-number all defs in the basic-block. */
4894 for (gphi_iterator gsi = gsi_start_phis (bb);
4895 !gsi_end_p (gsi); gsi_next (&gsi))
4897 gphi *phi = gsi.phi ();
4898 tree res = PHI_RESULT (phi);
4899 if (!VN_INFO (res)->visited)
4900 DFS (res);
4902 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4903 !gsi_end_p (gsi); gsi_next (&gsi))
4905 ssa_op_iter i;
4906 tree op;
4907 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4908 if (!VN_INFO (op)->visited)
4909 DFS (op);
4912 /* Finally look at the last stmt. */
4913 gimple *stmt = last_stmt (bb);
4914 if (!stmt)
4915 return NULL;
4917 enum gimple_code code = gimple_code (stmt);
4918 if (code != GIMPLE_COND
4919 && code != GIMPLE_SWITCH
4920 && code != GIMPLE_GOTO)
4921 return NULL;
4923 if (dump_file && (dump_flags & TDF_DETAILS))
4925 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4926 print_gimple_stmt (dump_file, stmt, 0);
4929 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4930 if value-numbering can prove they are not reachable. Handling
4931 computed gotos is also possible. */
4932 tree val;
4933 switch (code)
4935 case GIMPLE_COND:
4937 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4938 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4939 val = gimple_simplify (gimple_cond_code (stmt),
4940 boolean_type_node, lhs, rhs,
4941 NULL, vn_valueize);
4942 /* If that didn't simplify to a constant see if we have recorded
4943 temporary expressions from taken edges. */
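	/* E.g. in a block reached only over the true edge of a dominating
	   if (a_1 == 0), looking up a_1 == 0 here yields the recorded
	   boolean_true_node and the other outgoing edges can be marked
	   not executable.  */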
4944 if (!val || TREE_CODE (val) != INTEGER_CST)
4946 tree ops[2];
4947 ops[0] = lhs;
4948 ops[1] = rhs;
4949 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4950 boolean_type_node, ops, NULL);
4952 break;
4954 case GIMPLE_SWITCH:
4955 val = gimple_switch_index (as_a <gswitch *> (stmt));
4956 break;
4957 case GIMPLE_GOTO:
4958 val = gimple_goto_dest (stmt);
4959 break;
4960 default:
4961 gcc_unreachable ();
4963 if (!val)
4964 return NULL;
4966 edge taken = find_taken_edge (bb, vn_valueize (val));
4967 if (!taken)
4968 return NULL;
4970 if (dump_file && (dump_flags & TDF_DETAILS))
4971 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4972 "not executable\n", bb->index, bb->index, taken->dest->index);
4974 return taken;
4977 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4978 oracle walking during the VN process. */
4981 void
4982 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4984 size_t i;
4986 default_vn_walk_kind = default_vn_walk_kind_;
4988 init_scc_vn ();
4990 /* Collect pointers we know point to readonly memory. */
4991 const_parms = BITMAP_ALLOC (NULL);
4992 tree fnspec = lookup_attribute ("fn spec",
4993 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4994 if (fnspec)
4996 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4997 i = 1;
4998 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4999 arg; arg = DECL_CHAIN (arg), ++i)
5001 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5002 break;
5003 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5004 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5006 tree name = ssa_default_def (cfun, arg);
5007 if (name)
5008 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
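  /* For example, an 'R' or 'r' in the "fn spec" string at the position of
     a parameter marks that parameter's default def in const_parms as a
     pointer known to point to readonly memory.  */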
5013 /* Walk all blocks in dominator order, value-numbering the SSA defs
5014 of each stmt and deciding whether outgoing edges are not executable. */
5015 sccvn_dom_walker walker;
5016 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5018 /* Initialize the value ids and prune out remaining VN_TOPs
5019 from dead code. */
5020 tree name;
5021 FOR_EACH_SSA_NAME (i, name, cfun)
5023 vn_ssa_aux_t info = VN_INFO (name);
5024 if (!info->visited
5025 || info->valnum == VN_TOP)
5026 info->valnum = name;
5027 if (info->valnum == name)
5028 info->value_id = get_next_value_id ();
5029 else if (is_gimple_min_invariant (info->valnum))
5030 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5033 /* Propagate. */
5034 FOR_EACH_SSA_NAME (i, name, cfun)
5036 vn_ssa_aux_t info = VN_INFO (name);
5037 if (TREE_CODE (info->valnum) == SSA_NAME
5038 && info->valnum != name
5039 && info->value_id != VN_INFO (info->valnum)->value_id)
5040 info->value_id = VN_INFO (info->valnum)->value_id;
5043 set_hashtable_value_ids ();
5045 if (dump_file && (dump_flags & TDF_DETAILS))
5047 fprintf (dump_file, "Value numbers:\n");
5048 FOR_EACH_SSA_NAME (i, name, cfun)
5050 if (VN_INFO (name)->visited
5051 && SSA_VAL (name) != name)
5053 print_generic_expr (dump_file, name);
5054 fprintf (dump_file, " = ");
5055 print_generic_expr (dump_file, SSA_VAL (name));
5056 fprintf (dump_file, "\n");
5062 /* Return the maximum value id we have ever seen. */
5064 unsigned int
5065 get_max_value_id (void)
5067 return next_value_id;
5070 /* Return the next unique value id. */
5072 unsigned int
5073 get_next_value_id (void)
5075 return next_value_id++;
5079 /* Compare two expressions E1 and E2 and return true if they are equal. */
5081 bool
5082 expressions_equal_p (tree e1, tree e2)
5084 /* The obvious case. */
5085 if (e1 == e2)
5086 return true;
5088 /* If either one is VN_TOP consider them equal. */
5089 if (e1 == VN_TOP || e2 == VN_TOP)
5090 return true;
5092 /* If only one of them is null, they cannot be equal. */
5093 if (!e1 || !e2)
5094 return false;
5096 /* Now perform the actual comparison. */
5097 if (TREE_CODE (e1) == TREE_CODE (e2)
5098 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5099 return true;
5101 return false;
5105 /* Return true if the nary operation NARY may trap. This is a copy
5106 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5108 bool
5109 vn_nary_may_trap (vn_nary_op_t nary)
5111 tree type;
5112 tree rhs2 = NULL_TREE;
5113 bool honor_nans = false;
5114 bool honor_snans = false;
5115 bool fp_operation = false;
5116 bool honor_trapv = false;
5117 bool handled, ret;
5118 unsigned i;
5120 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5121 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5122 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5124 type = nary->type;
5125 fp_operation = FLOAT_TYPE_P (type);
5126 if (fp_operation)
5128 honor_nans = flag_trapping_math && !flag_finite_math_only;
5129 honor_snans = flag_signaling_nans != 0;
5131 else if (INTEGRAL_TYPE_P (type)
5132 && TYPE_OVERFLOW_TRAPS (type))
5133 honor_trapv = true;
5135 if (nary->length >= 2)
5136 rhs2 = nary->op[1];
5137 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5138 honor_trapv,
5139 honor_nans, honor_snans, rhs2,
5140 &handled);
5141 if (handled
5142 && ret)
5143 return true;
5145 for (i = 0; i < nary->length; ++i)
5146 if (tree_could_trap_p (nary->op[i]))
5147 return true;
5149 return false;