gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
63 #include "stringpool.h"
64 #include "attribs.h"
66 /* This algorithm is based on the SCC algorithm presented by Keith
67 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
68 (http://citeseer.ist.psu.edu/41805.html). In
69 straight line code, it is equivalent to a regular hash based value
70 numbering that is performed in reverse postorder.
72 For code with cycles, there are two alternatives, both of which
73 require keeping the hashtables separate from the actual list of
74 value numbers for SSA names.
76 1. Iterate value numbering in an RPO walk of the blocks, removing
77 all the entries from the hashtable after each iteration (but
78 keeping the SSA name->value number mapping between iterations).
79 Iterate until it does not change.
81 2. Perform value numbering as part of an SCC walk on the SSA graph,
82 iterating only the cycles in the SSA graph until they do not change
83 (using a separate, optimistic hashtable for value numbering the SCC
84 operands).
86 The second is not just faster in practice (because most SSA graph
87 cycles do not involve all the variables in the graph), it also has
88 some nice properties.
90 One of these nice properties is that when we pop an SCC off the
91 stack, we are guaranteed to have processed all the operands coming from
92 *outside of that SCC*, so we do not need to do anything special to
93 ensure they have value numbers.
95 Another nice property is that the SCC walk is done as part of a DFS
96 of the SSA graph, which makes it easy to perform combining and
97 simplifying operations at the same time.
99 The code below is deliberately written in a way that makes it easy
100 to separate the SCC walk from the other work it does.
102 In order to propagate constants through the code, we track which
103 expressions contain constants, and use those while folding. In
104 theory, we could also track expressions whose value numbers are
105 replaced, in case we end up folding based on expression
106 identities.
108 In order to value number memory, we assign value numbers to vuses.
109 This enables us to note that, for example, stores of the same
110 value to the same address from the same starting memory state are
111 equivalent.
112 TODO:
114 1. We can iterate only the changing portions of the SCCs, but
115 I have not seen an SCC big enough for this to be a win.
116 2. If you differentiate between phi nodes for loops and phi nodes
117 for if-then-else, you can properly consider phi nodes in different
118 blocks for equivalence.
119 3. We could value number vuses in more cases, particularly, whole
120 structure copies.
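/* As a rough sketch (details hand-waved; the actual walk lives in the
   DFS and SCC-processing routines later in this file), alternative 2
   amounts to:

     visit (name):
       give NAME the next DFS number and push it on sccstack;
       recurse into every not-yet-visited SSA operand of its
       defining statement;
       if NAME is the root of an SCC:
         pop that SCC off sccstack;
         if it is a singleton, value-number it once into the valid
         tables;
         otherwise iterate value-numbering its members against the
         optimistic tables, clearing those tables between iterations,
         until no value number changes, then copy the results into
         the valid tables.

   When an SCC is popped, every operand defined outside of it already
   has its final value number, which is the first property noted
   above.  */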
124 static tree *last_vuse_ptr;
125 static vn_lookup_kind vn_walk_kind;
126 static vn_lookup_kind default_vn_walk_kind;
127 bitmap const_parms;
129 /* vn_nary_op hashtable helpers. */
131 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
133 typedef vn_nary_op_s *compare_type;
134 static inline hashval_t hash (const vn_nary_op_s *);
135 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
138 /* Return the computed hashcode for nary operation P1. */
140 inline hashval_t
141 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
143 return vno1->hashcode;
146 /* Compare nary operations P1 and P2 and return true if they are
147 equivalent. */
149 inline bool
150 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
152 return vn_nary_op_eq (vno1, vno2);
155 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
156 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
159 /* vn_phi hashtable helpers. */
161 static int
162 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
164 struct vn_phi_hasher : pointer_hash <vn_phi_s>
166 static inline hashval_t hash (const vn_phi_s *);
167 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
168 static inline void remove (vn_phi_s *);
171 /* Return the computed hashcode for phi operation P1. */
173 inline hashval_t
174 vn_phi_hasher::hash (const vn_phi_s *vp1)
176 return vp1->hashcode;
179 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
181 inline bool
182 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
184 return vn_phi_eq (vp1, vp2);
187 /* Free a phi operation structure VP. */
189 inline void
190 vn_phi_hasher::remove (vn_phi_s *phi)
192 phi->phiargs.release ();
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
219 /* Free a reference operation structure VR. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : pointer_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 static inline void remove (vn_reference_s *);
237 /* Return the hashcode for a given reference operation P1. */
239 inline hashval_t
240 vn_reference_hasher::hash (const vn_reference_s *vr1)
242 return vr1->hashcode;
245 inline bool
246 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
248 return vn_reference_eq (v, c);
251 inline void
252 vn_reference_hasher::remove (vn_reference_s *v)
254 free_reference (v);
257 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
258 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
261 /* The set of hashtables and alloc_pool's for their items. */
263 typedef struct vn_tables_s
265 vn_nary_op_table_type *nary;
266 vn_phi_table_type *phis;
267 vn_reference_table_type *references;
268 struct obstack nary_obstack;
269 object_allocator<vn_phi_s> *phis_pool;
270 object_allocator<vn_reference_s> *references_pool;
271 } *vn_tables_t;
274 /* vn_constant hashtable helpers. */
276 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
278 static inline hashval_t hash (const vn_constant_s *);
279 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
282 /* Hash table hash function for vn_constant_t. */
284 inline hashval_t
285 vn_constant_hasher::hash (const vn_constant_s *vc1)
287 return vc1->hashcode;
290 /* Hash table equality function for vn_constant_t. */
292 inline bool
293 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
295 if (vc1->hashcode != vc2->hashcode)
296 return false;
298 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
301 static hash_table<vn_constant_hasher> *constant_to_value_id;
302 static bitmap constant_value_ids;
305 /* Valid hashtables storing information we have proven to be
306 correct. */
308 static vn_tables_t valid_info;
310 /* Optimistic hashtables storing information we are making assumptions about
311 during iterations. */
313 static vn_tables_t optimistic_info;
315 /* Pointer to the set of hashtables that is currently being used.
316 Should always point to either the optimistic_info, or the
317 valid_info. */
319 static vn_tables_t current_info;
322 /* Reverse post order index for each basic block. */
324 static int *rpo_numbers;
326 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
328 /* Return the SSA value of the VUSE x, supporting released VDEFs
329 during elimination which will value-number the VDEF to the
330 associated VUSE (but not substitute in the whole lattice). */
332 static inline tree
333 vuse_ssa_val (tree x)
335 if (!x)
336 return NULL_TREE;
340 x = SSA_VAL (x);
342 while (SSA_NAME_IN_FREE_LIST (x));
344 return x;
347 /* This represents the top of the VN lattice, which is the universal
348 value. */
350 tree VN_TOP;
352 /* Unique counter for our value ids. */
354 static unsigned int next_value_id;
356 /* Next DFS number and the stack for strongly connected component
357 detection. */
359 static unsigned int next_dfs_num;
360 static vec<tree> sccstack;
364 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
365 are allocated on an obstack for locality reasons, and to free them
366 without looping over the vec. */
368 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
369 static struct obstack vn_ssa_aux_obstack;
371 /* Return whether there is value numbering information for a given SSA name. */
373 bool
374 has_VN_INFO (tree name)
376 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
377 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
378 return false;
381 /* Return the value numbering information for a given SSA name. */
383 vn_ssa_aux_t
384 VN_INFO (tree name)
386 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
387 gcc_checking_assert (res);
388 return res;
391 /* Set the value numbering info for a given SSA name to a given
392 value. */
394 static inline void
395 VN_INFO_SET (tree name, vn_ssa_aux_t value)
397 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
400 /* Initialize the value numbering info for a given SSA name.
401 This should be called just once for every SSA name. */
403 vn_ssa_aux_t
404 VN_INFO_GET (tree name)
406 vn_ssa_aux_t newinfo;
408 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
409 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
410 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
411 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
412 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
413 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
414 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
415 return newinfo;
419 /* Return the vn_kind the expression computed by the stmt should be
420 associated with. */
422 enum vn_kind
423 vn_get_stmt_kind (gimple *stmt)
425 switch (gimple_code (stmt))
427 case GIMPLE_CALL:
428 return VN_REFERENCE;
429 case GIMPLE_PHI:
430 return VN_PHI;
431 case GIMPLE_ASSIGN:
433 enum tree_code code = gimple_assign_rhs_code (stmt);
434 tree rhs1 = gimple_assign_rhs1 (stmt);
435 switch (get_gimple_rhs_class (code))
437 case GIMPLE_UNARY_RHS:
438 case GIMPLE_BINARY_RHS:
439 case GIMPLE_TERNARY_RHS:
440 return VN_NARY;
441 case GIMPLE_SINGLE_RHS:
442 switch (TREE_CODE_CLASS (code))
444 case tcc_reference:
445 /* VOP-less references can go through unary case. */
446 if ((code == REALPART_EXPR
447 || code == IMAGPART_EXPR
448 || code == VIEW_CONVERT_EXPR
449 || code == BIT_FIELD_REF)
450 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
451 return VN_NARY;
453 /* Fallthrough. */
454 case tcc_declaration:
455 return VN_REFERENCE;
457 case tcc_constant:
458 return VN_CONSTANT;
460 default:
461 if (code == ADDR_EXPR)
462 return (is_gimple_min_invariant (rhs1)
463 ? VN_CONSTANT : VN_REFERENCE);
464 else if (code == CONSTRUCTOR)
465 return VN_NARY;
466 return VN_NONE;
468 default:
469 return VN_NONE;
472 default:
473 return VN_NONE;
477 /* Lookup a value id for CONSTANT and return it. If it does not
478 exist, return 0. */
480 unsigned int
481 get_constant_value_id (tree constant)
483 vn_constant_s **slot;
484 struct vn_constant_s vc;
486 vc.hashcode = vn_hash_constant_with_type (constant);
487 vc.constant = constant;
488 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
489 if (slot)
490 return (*slot)->value_id;
491 return 0;
494 /* Lookup a value id for CONSTANT, and if it does not exist, create a
495 new one and return it. If it does exist, return it. */
497 unsigned int
498 get_or_alloc_constant_value_id (tree constant)
500 vn_constant_s **slot;
501 struct vn_constant_s vc;
502 vn_constant_t vcp;
504 vc.hashcode = vn_hash_constant_with_type (constant);
505 vc.constant = constant;
506 slot = constant_to_value_id->find_slot (&vc, INSERT);
507 if (*slot)
508 return (*slot)->value_id;
510 vcp = XNEW (struct vn_constant_s);
511 vcp->hashcode = vc.hashcode;
512 vcp->constant = constant;
513 vcp->value_id = get_next_value_id ();
514 *slot = vcp;
515 bitmap_set_bit (constant_value_ids, vcp->value_id);
516 return vcp->value_id;
519 /* Return true if V is a value id for a constant. */
521 bool
522 value_id_constant_p (unsigned int v)
524 return bitmap_bit_p (constant_value_ids, v);
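/* For example (illustrative only): the first call to
   get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 7))
   allocates a fresh id via get_next_value_id, records it in
   constant_value_ids, and any later lookup of an equal constant of a
   compatible type yields the same id, so value_id_constant_p is true
   for that id while staying false for ids of non-constant values.  */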
527 /* Compute the hash for a reference operand VRO1. */
529 static void
530 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
532 hstate.add_int (vro1->opcode);
533 if (vro1->op0)
534 inchash::add_expr (vro1->op0, hstate);
535 if (vro1->op1)
536 inchash::add_expr (vro1->op1, hstate);
537 if (vro1->op2)
538 inchash::add_expr (vro1->op2, hstate);
541 /* Compute a hash for the reference operation VR1 and return it. */
543 static hashval_t
544 vn_reference_compute_hash (const vn_reference_t vr1)
546 inchash::hash hstate;
547 hashval_t result;
548 int i;
549 vn_reference_op_t vro;
550 HOST_WIDE_INT off = -1;
551 bool deref = false;
553 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
555 if (vro->opcode == MEM_REF)
556 deref = true;
557 else if (vro->opcode != ADDR_EXPR)
558 deref = false;
559 if (vro->off != -1)
561 if (off == -1)
562 off = 0;
563 off += vro->off;
565 else
567 if (off != -1
568 && off != 0)
569 hstate.add_int (off);
570 off = -1;
571 if (deref
572 && vro->opcode == ADDR_EXPR)
574 if (vro->op0)
576 tree op = TREE_OPERAND (vro->op0, 0);
577 hstate.add_int (TREE_CODE (op));
578 inchash::add_expr (op, hstate);
581 else
582 vn_reference_op_compute_hash (vro, hstate);
585 result = hstate.end ();
586 /* ??? We would ICE later if we hash instead of adding that in. */
587 if (vr1->vuse)
588 result += SSA_NAME_VERSION (vr1->vuse);
590 return result;
593 /* Return true if reference operations VR1 and VR2 are equivalent. This
594 means they have the same set of operands and vuses. */
596 bool
597 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
599 unsigned i, j;
601 /* Early out if this is not a hash collision. */
602 if (vr1->hashcode != vr2->hashcode)
603 return false;
605 /* The VOP needs to be the same. */
606 if (vr1->vuse != vr2->vuse)
607 return false;
609 /* If the operands are the same we are done. */
610 if (vr1->operands == vr2->operands)
611 return true;
613 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
614 return false;
616 if (INTEGRAL_TYPE_P (vr1->type)
617 && INTEGRAL_TYPE_P (vr2->type))
619 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
620 return false;
622 else if (INTEGRAL_TYPE_P (vr1->type)
623 && (TYPE_PRECISION (vr1->type)
624 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
625 return false;
626 else if (INTEGRAL_TYPE_P (vr2->type)
627 && (TYPE_PRECISION (vr2->type)
628 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
629 return false;
631 i = 0;
632 j = 0;
635 HOST_WIDE_INT off1 = 0, off2 = 0;
636 vn_reference_op_t vro1, vro2;
637 vn_reference_op_s tem1, tem2;
638 bool deref1 = false, deref2 = false;
639 for (; vr1->operands.iterate (i, &vro1); i++)
641 if (vro1->opcode == MEM_REF)
642 deref1 = true;
643 /* Do not look through a storage order barrier. */
644 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
645 return false;
646 if (vro1->off == -1)
647 break;
648 off1 += vro1->off;
650 for (; vr2->operands.iterate (j, &vro2); j++)
652 if (vro2->opcode == MEM_REF)
653 deref2 = true;
654 /* Do not look through a storage order barrier. */
655 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
656 return false;
657 if (vro2->off == -1)
658 break;
659 off2 += vro2->off;
661 if (off1 != off2)
662 return false;
663 if (deref1 && vro1->opcode == ADDR_EXPR)
665 memset (&tem1, 0, sizeof (tem1));
666 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
667 tem1.type = TREE_TYPE (tem1.op0);
668 tem1.opcode = TREE_CODE (tem1.op0);
669 vro1 = &tem1;
670 deref1 = false;
672 if (deref2 && vro2->opcode == ADDR_EXPR)
674 memset (&tem2, 0, sizeof (tem2));
675 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
676 tem2.type = TREE_TYPE (tem2.op0);
677 tem2.opcode = TREE_CODE (tem2.op0);
678 vro2 = &tem2;
679 deref2 = false;
681 if (deref1 != deref2)
682 return false;
683 if (!vn_reference_op_eq (vro1, vro2))
684 return false;
685 ++j;
686 ++i;
688 while (vr1->operands.length () != i
689 || vr2->operands.length () != j);
691 return true;
694 /* Copy the operations present in load/store REF into RESULT, a vector of
695 vn_reference_op_s's. */
697 static void
698 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
700 if (TREE_CODE (ref) == TARGET_MEM_REF)
702 vn_reference_op_s temp;
704 result->reserve (3);
706 memset (&temp, 0, sizeof (temp));
707 temp.type = TREE_TYPE (ref);
708 temp.opcode = TREE_CODE (ref);
709 temp.op0 = TMR_INDEX (ref);
710 temp.op1 = TMR_STEP (ref);
711 temp.op2 = TMR_OFFSET (ref);
712 temp.off = -1;
713 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
714 temp.base = MR_DEPENDENCE_BASE (ref);
715 result->quick_push (temp);
717 memset (&temp, 0, sizeof (temp));
718 temp.type = NULL_TREE;
719 temp.opcode = ERROR_MARK;
720 temp.op0 = TMR_INDEX2 (ref);
721 temp.off = -1;
722 result->quick_push (temp);
724 memset (&temp, 0, sizeof (temp));
725 temp.type = NULL_TREE;
726 temp.opcode = TREE_CODE (TMR_BASE (ref));
727 temp.op0 = TMR_BASE (ref);
728 temp.off = -1;
729 result->quick_push (temp);
730 return;
733 /* For non-calls, store the information that makes up the address. */
734 tree orig = ref;
735 while (ref)
737 vn_reference_op_s temp;
739 memset (&temp, 0, sizeof (temp));
740 temp.type = TREE_TYPE (ref);
741 temp.opcode = TREE_CODE (ref);
742 temp.off = -1;
744 switch (temp.opcode)
746 case MODIFY_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 break;
749 case WITH_SIZE_EXPR:
750 temp.op0 = TREE_OPERAND (ref, 1);
751 temp.off = 0;
752 break;
753 case MEM_REF:
754 /* The base address gets its own vn_reference_op_s structure. */
755 temp.op0 = TREE_OPERAND (ref, 1);
757 offset_int off = mem_ref_offset (ref);
758 if (wi::fits_shwi_p (off))
759 temp.off = off.to_shwi ();
761 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
762 temp.base = MR_DEPENDENCE_BASE (ref);
763 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
764 break;
765 case BIT_FIELD_REF:
766 /* Record bits, position and storage order. */
767 temp.op0 = TREE_OPERAND (ref, 1);
768 temp.op1 = TREE_OPERAND (ref, 2);
769 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
771 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
772 if (off % BITS_PER_UNIT == 0)
773 temp.off = off / BITS_PER_UNIT;
775 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
776 break;
777 case COMPONENT_REF:
778 /* The field decl is enough to unambiguously specify the field,
779 a matching type is not necessary and a mismatching type
780 is always a spurious difference. */
781 temp.type = NULL_TREE;
782 temp.op0 = TREE_OPERAND (ref, 1);
783 temp.op1 = TREE_OPERAND (ref, 2);
785 tree this_offset = component_ref_field_offset (ref);
786 if (this_offset
787 && TREE_CODE (this_offset) == INTEGER_CST)
789 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
790 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
792 offset_int off
793 = (wi::to_offset (this_offset)
794 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
795 if (wi::fits_shwi_p (off)
796 /* Prohibit value-numbering zero offset components
797 of addresses the same before the pass folding
798 __builtin_object_size had a chance to run
799 (checking cfun->after_inlining does the
800 trick here). */
801 && (TREE_CODE (orig) != ADDR_EXPR
802 || off != 0
803 || cfun->after_inlining))
804 temp.off = off.to_shwi ();
808 break;
809 case ARRAY_RANGE_REF:
810 case ARRAY_REF:
812 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
813 /* Record index as operand. */
814 temp.op0 = TREE_OPERAND (ref, 1);
815 /* Always record lower bounds and element size. */
816 temp.op1 = array_ref_low_bound (ref);
817 /* But record element size in units of the type alignment. */
818 temp.op2 = TREE_OPERAND (ref, 3);
819 temp.align = eltype->type_common.align;
820 if (! temp.op2)
821 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
822 size_int (TYPE_ALIGN_UNIT (eltype)));
823 if (TREE_CODE (temp.op0) == INTEGER_CST
824 && TREE_CODE (temp.op1) == INTEGER_CST
825 && TREE_CODE (temp.op2) == INTEGER_CST)
827 offset_int off = ((wi::to_offset (temp.op0)
828 - wi::to_offset (temp.op1))
829 * wi::to_offset (temp.op2)
830 * vn_ref_op_align_unit (&temp));
831 if (wi::fits_shwi_p (off))
832 temp.off = off.to_shwi();
835 break;
836 case VAR_DECL:
837 if (DECL_HARD_REGISTER (ref))
839 temp.op0 = ref;
840 break;
842 /* Fallthru. */
843 case PARM_DECL:
844 case CONST_DECL:
845 case RESULT_DECL:
846 /* Canonicalize decls to MEM[&decl] which is what we end up with
847 when valueizing MEM[ptr] with ptr = &decl. */
848 temp.opcode = MEM_REF;
849 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
850 temp.off = 0;
851 result->safe_push (temp);
852 temp.opcode = ADDR_EXPR;
853 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
854 temp.type = TREE_TYPE (temp.op0);
855 temp.off = -1;
856 break;
857 case STRING_CST:
858 case INTEGER_CST:
859 case COMPLEX_CST:
860 case VECTOR_CST:
861 case REAL_CST:
862 case FIXED_CST:
863 case CONSTRUCTOR:
864 case SSA_NAME:
865 temp.op0 = ref;
866 break;
867 case ADDR_EXPR:
868 if (is_gimple_min_invariant (ref))
870 temp.op0 = ref;
871 break;
873 break;
874 /* These are only interesting for their operands, their
875 existence, and their type. They will never be the last
876 ref in the chain of references (i.e. they require an
877 operand), so we don't have to put anything
878 for op* as it will be handled by the iteration. */
879 case REALPART_EXPR:
880 temp.off = 0;
881 break;
882 case VIEW_CONVERT_EXPR:
883 temp.off = 0;
884 temp.reverse = storage_order_barrier_p (ref);
885 break;
886 case IMAGPART_EXPR:
887 /* This is only interesting for its constant offset. */
888 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
889 break;
890 default:
891 gcc_unreachable ();
893 result->safe_push (temp);
895 if (REFERENCE_CLASS_P (ref)
896 || TREE_CODE (ref) == MODIFY_EXPR
897 || TREE_CODE (ref) == WITH_SIZE_EXPR
898 || (TREE_CODE (ref) == ADDR_EXPR
899 && !is_gimple_min_invariant (ref)))
900 ref = TREE_OPERAND (ref, 0);
901 else
902 ref = NULL_TREE;
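/* For illustration: for a load from s.f, with S a VAR_DECL of
   structure type and F a FIELD_DECL, the walk above produces,
   outermost operand first, roughly

     { COMPONENT_REF   op0 = F     off = byte offset of F (if known)
       MEM_REF         op0 = 0B    off = 0
       ADDR_EXPR       op0 = &S    off = -1 }

   i.e. the decl itself is canonicalized to MEM[&S], as the comment in
   the decl cases explains, so the access hashes and compares equal to
   an indirect access through a pointer whose value turned out to be
   &S.  */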
906 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
907 operands in *OPS, the reference alias set SET and the reference type TYPE.
908 Return true if something useful was produced. */
910 bool
911 ao_ref_init_from_vn_reference (ao_ref *ref,
912 alias_set_type set, tree type,
913 vec<vn_reference_op_s> ops)
915 vn_reference_op_t op;
916 unsigned i;
917 tree base = NULL_TREE;
918 tree *op0_p = &base;
919 offset_int offset = 0;
920 offset_int max_size;
921 offset_int size = -1;
922 tree size_tree = NULL_TREE;
923 alias_set_type base_alias_set = -1;
925 /* First get the final access size from just the outermost expression. */
926 op = &ops[0];
927 if (op->opcode == COMPONENT_REF)
928 size_tree = DECL_SIZE (op->op0);
929 else if (op->opcode == BIT_FIELD_REF)
930 size_tree = op->op0;
931 else
933 machine_mode mode = TYPE_MODE (type);
934 if (mode == BLKmode)
935 size_tree = TYPE_SIZE (type);
936 else
937 size = int (GET_MODE_BITSIZE (mode));
939 if (size_tree != NULL_TREE
940 && TREE_CODE (size_tree) == INTEGER_CST)
941 size = wi::to_offset (size_tree);
943 /* Initially, maxsize is the same as the accessed element size.
944 In the following it will only grow (or become -1). */
945 max_size = size;
947 /* Compute cumulative bit-offset for nested component-refs and array-refs,
948 and find the ultimate containing object. */
949 FOR_EACH_VEC_ELT (ops, i, op)
951 switch (op->opcode)
953 /* These may be in the reference ops, but we cannot do anything
954 sensible with them here. */
955 case ADDR_EXPR:
956 /* Apart from ADDR_EXPR arguments to MEM_REF. */
957 if (base != NULL_TREE
958 && TREE_CODE (base) == MEM_REF
959 && op->op0
960 && DECL_P (TREE_OPERAND (op->op0, 0)))
962 vn_reference_op_t pop = &ops[i-1];
963 base = TREE_OPERAND (op->op0, 0);
964 if (pop->off == -1)
966 max_size = -1;
967 offset = 0;
969 else
970 offset += pop->off * BITS_PER_UNIT;
971 op0_p = NULL;
972 break;
974 /* Fallthru. */
975 case CALL_EXPR:
976 return false;
978 /* Record the base objects. */
979 case MEM_REF:
980 base_alias_set = get_deref_alias_set (op->op0);
981 *op0_p = build2 (MEM_REF, op->type,
982 NULL_TREE, op->op0);
983 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
984 MR_DEPENDENCE_BASE (*op0_p) = op->base;
985 op0_p = &TREE_OPERAND (*op0_p, 0);
986 break;
988 case VAR_DECL:
989 case PARM_DECL:
990 case RESULT_DECL:
991 case SSA_NAME:
992 *op0_p = op->op0;
993 op0_p = NULL;
994 break;
996 /* And now the usual component-reference style ops. */
997 case BIT_FIELD_REF:
998 offset += wi::to_offset (op->op1);
999 break;
1001 case COMPONENT_REF:
1003 tree field = op->op0;
1004 /* We do not have a complete COMPONENT_REF tree here so we
1005 cannot use component_ref_field_offset. Do the interesting
1006 parts manually. */
1007 tree this_offset = DECL_FIELD_OFFSET (field);
1009 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1010 max_size = -1;
1011 else
1013 offset_int woffset = (wi::to_offset (this_offset)
1014 << LOG2_BITS_PER_UNIT);
1015 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1016 offset += woffset;
1018 break;
1021 case ARRAY_RANGE_REF:
1022 case ARRAY_REF:
1023 /* We recorded the lower bound and the element size. */
1024 if (TREE_CODE (op->op0) != INTEGER_CST
1025 || TREE_CODE (op->op1) != INTEGER_CST
1026 || TREE_CODE (op->op2) != INTEGER_CST)
1027 max_size = -1;
1028 else
1030 offset_int woffset
1031 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1032 TYPE_PRECISION (TREE_TYPE (op->op0)));
1033 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1034 woffset <<= LOG2_BITS_PER_UNIT;
1035 offset += woffset;
1037 break;
1039 case REALPART_EXPR:
1040 break;
1042 case IMAGPART_EXPR:
1043 offset += size;
1044 break;
1046 case VIEW_CONVERT_EXPR:
1047 break;
1049 case STRING_CST:
1050 case INTEGER_CST:
1051 case COMPLEX_CST:
1052 case VECTOR_CST:
1053 case REAL_CST:
1054 case CONSTRUCTOR:
1055 case CONST_DECL:
1056 return false;
1058 default:
1059 return false;
1063 if (base == NULL_TREE)
1064 return false;
1066 ref->ref = NULL_TREE;
1067 ref->base = base;
1068 ref->ref_alias_set = set;
1069 if (base_alias_set != -1)
1070 ref->base_alias_set = base_alias_set;
1071 else
1072 ref->base_alias_set = get_alias_set (base);
1073 /* We discount volatiles from value-numbering elsewhere. */
1074 ref->volatile_p = false;
1076 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1078 ref->offset = 0;
1079 ref->size = -1;
1080 ref->max_size = -1;
1081 return true;
1084 ref->size = size.to_shwi ();
1086 if (!wi::fits_shwi_p (offset))
1088 ref->offset = 0;
1089 ref->max_size = -1;
1090 return true;
1093 ref->offset = offset.to_shwi ();
1095 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1096 ref->max_size = -1;
1097 else
1098 ref->max_size = max_size.to_shwi ();
1100 return true;
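/* For illustration: handed the three operands from the s.f example
   above, the loop first adds F's bit offset, then materializes a
   MEM_REF for the base, and the ADDR_EXPR case strips it again, so
   the resulting ao_ref has base S itself, offset equal to F's bit
   position and size/max_size taken from the access type, ready for
   the alias oracle (refs_may_alias_p_1 and friends).  */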
1103 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1104 vn_reference_op_s's. */
1106 static void
1107 copy_reference_ops_from_call (gcall *call,
1108 vec<vn_reference_op_s> *result)
1110 vn_reference_op_s temp;
1111 unsigned i;
1112 tree lhs = gimple_call_lhs (call);
1113 int lr;
1115 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1116 different. By adding the lhs here in the vector, we ensure that the
1117 hashcode is different, guaranteeing a different value number. */
1118 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1120 memset (&temp, 0, sizeof (temp));
1121 temp.opcode = MODIFY_EXPR;
1122 temp.type = TREE_TYPE (lhs);
1123 temp.op0 = lhs;
1124 temp.off = -1;
1125 result->safe_push (temp);
1128 /* Copy the type, opcode, function, static chain and EH region, if any. */
1129 memset (&temp, 0, sizeof (temp));
1130 temp.type = gimple_call_return_type (call);
1131 temp.opcode = CALL_EXPR;
1132 temp.op0 = gimple_call_fn (call);
1133 temp.op1 = gimple_call_chain (call);
1134 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1135 temp.op2 = size_int (lr);
1136 temp.off = -1;
1137 if (gimple_call_with_bounds_p (call))
1138 temp.with_bounds = 1;
1139 result->safe_push (temp);
1141 /* Copy the call arguments. As they can be references as well,
1142 just chain them together. */
1143 for (i = 0; i < gimple_call_num_args (call); ++i)
1145 tree callarg = gimple_call_arg (call, i);
1146 copy_reference_ops_from_ref (callarg, result);
1150 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1151 *I_P to point to the last element of the replacement. */
1152 static bool
1153 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1154 unsigned int *i_p)
1156 unsigned int i = *i_p;
1157 vn_reference_op_t op = &(*ops)[i];
1158 vn_reference_op_t mem_op = &(*ops)[i - 1];
1159 tree addr_base;
1160 HOST_WIDE_INT addr_offset = 0;
1162 /* The only thing we have to do is add the offset of .foo.bar in
1163 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1164 address with &OBJ. */
1165 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1166 &addr_offset);
1167 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1168 if (addr_base != TREE_OPERAND (op->op0, 0))
1170 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1171 off += addr_offset;
1172 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1173 op->op0 = build_fold_addr_expr (addr_base);
1174 if (tree_fits_shwi_p (mem_op->op0))
1175 mem_op->off = tree_to_shwi (mem_op->op0);
1176 else
1177 mem_op->off = -1;
1178 return true;
1180 return false;
1183 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1184 *I_P to point to the last element of the replacement. */
1185 static bool
1186 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1187 unsigned int *i_p)
1189 unsigned int i = *i_p;
1190 vn_reference_op_t op = &(*ops)[i];
1191 vn_reference_op_t mem_op = &(*ops)[i - 1];
1192 gimple *def_stmt;
1193 enum tree_code code;
1194 offset_int off;
1196 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1197 if (!is_gimple_assign (def_stmt))
1198 return false;
1200 code = gimple_assign_rhs_code (def_stmt);
1201 if (code != ADDR_EXPR
1202 && code != POINTER_PLUS_EXPR)
1203 return false;
1205 off = offset_int::from (mem_op->op0, SIGNED);
1207 /* The only thing we have to do is add the offset of .foo.bar in
1208 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1209 address with &OBJ. */
1210 if (code == ADDR_EXPR)
1212 tree addr, addr_base;
1213 HOST_WIDE_INT addr_offset;
1215 addr = gimple_assign_rhs1 (def_stmt);
1216 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1217 &addr_offset);
1218 /* If that didn't work because the address isn't invariant, propagate
1219 the reference tree from the address operation in case the current
1220 dereference has no offset. */
1221 if (!addr_base
1222 && *i_p == ops->length () - 1
1223 && off == 0
1224 /* This makes us disable this transform for PRE where the
1225 reference ops might also be used for code insertion, which
1226 is invalid. */
1227 && default_vn_walk_kind == VN_WALKREWRITE)
1229 auto_vec<vn_reference_op_s, 32> tem;
1230 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1231 /* Make sure to preserve TBAA info. The only objects not
1232 wrapped in MEM_REFs that can have their address taken are
1233 STRING_CSTs. */
1234 if (tem.length () >= 2
1235 && tem[tem.length () - 2].opcode == MEM_REF)
1237 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1238 new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1239 new_mem_op->op0);
1241 else
1242 gcc_assert (tem.last ().opcode == STRING_CST);
1243 ops->pop ();
1244 ops->pop ();
1245 ops->safe_splice (tem);
1246 --*i_p;
1247 return true;
1249 if (!addr_base
1250 || TREE_CODE (addr_base) != MEM_REF)
1251 return false;
1253 off += addr_offset;
1254 off += mem_ref_offset (addr_base);
1255 op->op0 = TREE_OPERAND (addr_base, 0);
1257 else
1259 tree ptr, ptroff;
1260 ptr = gimple_assign_rhs1 (def_stmt);
1261 ptroff = gimple_assign_rhs2 (def_stmt);
1262 if (TREE_CODE (ptr) != SSA_NAME
1263 || TREE_CODE (ptroff) != INTEGER_CST)
1264 return false;
1266 off += wi::to_offset (ptroff);
1267 op->op0 = ptr;
1270 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1271 if (tree_fits_shwi_p (mem_op->op0))
1272 mem_op->off = tree_to_shwi (mem_op->op0);
1273 else
1274 mem_op->off = -1;
1275 if (TREE_CODE (op->op0) == SSA_NAME)
1276 op->op0 = SSA_VAL (op->op0);
1277 if (TREE_CODE (op->op0) != SSA_NAME)
1278 op->opcode = TREE_CODE (op->op0);
1280 /* And recurse. */
1281 if (TREE_CODE (op->op0) == SSA_NAME)
1282 vn_reference_maybe_forwprop_address (ops, i_p);
1283 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1284 vn_reference_fold_indirect (ops, i_p);
1285 return true;
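/* For illustration (a simplified view of the two helpers above):
   vn_reference_fold_indirect turns the operand pair for
   MEM[(&s.f) + 8] into the pair for MEM[(&s) + offsetof (f) + 8],
   folding the component offset into the MEM_REF operand.
   vn_reference_maybe_forwprop_address does the same through an SSA
   definition: given

     ptr_1 = ptr_2 + 4;
     ... = MEM[ptr_1 + 8];

   the operand pair becomes that of MEM[ptr_2 + 12], and likewise for
   ptr_1 = &MEM[ptr_2].f; it then recurses so chains of such
   definitions collapse into a single offsetted dereference.  */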
1288 /* Optimize the reference REF to a constant if possible or return
1289 NULL_TREE if not. */
1291 tree
1292 fully_constant_vn_reference_p (vn_reference_t ref)
1294 vec<vn_reference_op_s> operands = ref->operands;
1295 vn_reference_op_t op;
1297 /* Try to simplify the translated expression if it is
1298 a call to a builtin function with at most two arguments. */
1299 op = &operands[0];
1300 if (op->opcode == CALL_EXPR
1301 && TREE_CODE (op->op0) == ADDR_EXPR
1302 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1303 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1304 && operands.length () >= 2
1305 && operands.length () <= 3)
1307 vn_reference_op_t arg0, arg1 = NULL;
1308 bool anyconst = false;
1309 arg0 = &operands[1];
1310 if (operands.length () > 2)
1311 arg1 = &operands[2];
1312 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1313 || (arg0->opcode == ADDR_EXPR
1314 && is_gimple_min_invariant (arg0->op0)))
1315 anyconst = true;
1316 if (arg1
1317 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1318 || (arg1->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg1->op0))))
1320 anyconst = true;
1321 if (anyconst)
1323 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1324 arg1 ? 2 : 1,
1325 arg0->op0,
1326 arg1 ? arg1->op0 : NULL);
1327 if (folded
1328 && TREE_CODE (folded) == NOP_EXPR)
1329 folded = TREE_OPERAND (folded, 0);
1330 if (folded
1331 && is_gimple_min_invariant (folded))
1332 return folded;
1336 /* Simplify reads from constants or constant initializers. */
1337 else if (BITS_PER_UNIT == 8
1338 && is_gimple_reg_type (ref->type)
1339 && (!INTEGRAL_TYPE_P (ref->type)
1340 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1342 HOST_WIDE_INT off = 0;
1343 HOST_WIDE_INT size;
1344 if (INTEGRAL_TYPE_P (ref->type))
1345 size = TYPE_PRECISION (ref->type);
1346 else
1347 size = tree_to_shwi (TYPE_SIZE (ref->type));
1348 if (size % BITS_PER_UNIT != 0
1349 || size > MAX_BITSIZE_MODE_ANY_MODE)
1350 return NULL_TREE;
1351 size /= BITS_PER_UNIT;
1352 unsigned i;
1353 for (i = 0; i < operands.length (); ++i)
1355 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1357 ++i;
1358 break;
1360 if (operands[i].off == -1)
1361 return NULL_TREE;
1362 off += operands[i].off;
1363 if (operands[i].opcode == MEM_REF)
1365 ++i;
1366 break;
1369 vn_reference_op_t base = &operands[--i];
1370 tree ctor = error_mark_node;
1371 tree decl = NULL_TREE;
1372 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1373 ctor = base->op0;
1374 else if (base->opcode == MEM_REF
1375 && base[1].opcode == ADDR_EXPR
1376 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1377 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1379 decl = TREE_OPERAND (base[1].op0, 0);
1380 ctor = ctor_for_folding (decl);
1382 if (ctor == NULL_TREE)
1383 return build_zero_cst (ref->type);
1384 else if (ctor != error_mark_node)
1386 if (decl)
1388 tree res = fold_ctor_reference (ref->type, ctor,
1389 off * BITS_PER_UNIT,
1390 size * BITS_PER_UNIT, decl);
1391 if (res)
1393 STRIP_USELESS_TYPE_CONVERSION (res);
1394 if (is_gimple_min_invariant (res))
1395 return res;
1398 else
1400 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1401 int len = native_encode_expr (ctor, buf, size, off);
1402 if (len > 0)
1403 return native_interpret_expr (ref->type, buf, len);
1408 return NULL_TREE;
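/* For illustration: with

     static const int tab[4] = { 1, 2, 3, 5 };
     ... = tab[2];

   the reference operands carry a constant accumulated offset of
   8 bytes down to the MEM_REF/ADDR_EXPR pair for tab, so the code
   above fetches tab's initializer via ctor_for_folding and
   fold_ctor_reference returns the constant 3.  A read known to touch
   only zero-initialized storage yields build_zero_cst (ref->type)
   instead.  */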
1411 /* Return true if OPS contain a storage order barrier. */
1413 static bool
1414 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1416 vn_reference_op_t op;
1417 unsigned i;
1419 FOR_EACH_VEC_ELT (ops, i, op)
1420 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1421 return true;
1423 return false;
1426 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1427 structures into their value numbers. This is done in-place, and
1428 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1429 whether any operands were valueized. */
1431 static vec<vn_reference_op_s>
1432 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1434 vn_reference_op_t vro;
1435 unsigned int i;
1437 *valueized_anything = false;
1439 FOR_EACH_VEC_ELT (orig, i, vro)
1441 if (vro->opcode == SSA_NAME
1442 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1444 tree tem = SSA_VAL (vro->op0);
1445 if (tem != vro->op0)
1447 *valueized_anything = true;
1448 vro->op0 = tem;
1450 /* If it transforms from an SSA_NAME to a constant, update
1451 the opcode. */
1452 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1453 vro->opcode = TREE_CODE (vro->op0);
1455 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1457 tree tem = SSA_VAL (vro->op1);
1458 if (tem != vro->op1)
1460 *valueized_anything = true;
1461 vro->op1 = tem;
1464 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1466 tree tem = SSA_VAL (vro->op2);
1467 if (tem != vro->op2)
1469 *valueized_anything = true;
1470 vro->op2 = tem;
1473 /* If it transforms from an SSA_NAME to an address, fold with
1474 a preceding indirect reference. */
1475 if (i > 0
1476 && vro->op0
1477 && TREE_CODE (vro->op0) == ADDR_EXPR
1478 && orig[i - 1].opcode == MEM_REF)
1480 if (vn_reference_fold_indirect (&orig, &i))
1481 *valueized_anything = true;
1483 else if (i > 0
1484 && vro->opcode == SSA_NAME
1485 && orig[i - 1].opcode == MEM_REF)
1487 if (vn_reference_maybe_forwprop_address (&orig, &i))
1488 *valueized_anything = true;
1490 /* If it transforms a non-constant ARRAY_REF into a constant
1491 one, adjust the constant offset. */
1492 else if (vro->opcode == ARRAY_REF
1493 && vro->off == -1
1494 && TREE_CODE (vro->op0) == INTEGER_CST
1495 && TREE_CODE (vro->op1) == INTEGER_CST
1496 && TREE_CODE (vro->op2) == INTEGER_CST)
1498 offset_int off = ((wi::to_offset (vro->op0)
1499 - wi::to_offset (vro->op1))
1500 * wi::to_offset (vro->op2)
1501 * vn_ref_op_align_unit (vro));
1502 if (wi::fits_shwi_p (off))
1503 vro->off = off.to_shwi ();
1507 return orig;
1510 static vec<vn_reference_op_s>
1511 valueize_refs (vec<vn_reference_op_s> orig)
1513 bool tem;
1514 return valueize_refs_1 (orig, &tem);
1517 static vec<vn_reference_op_s> shared_lookup_references;
1519 /* Create a vector of vn_reference_op_s structures from REF, a
1520 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1521 this function. *VALUEIZED_ANYTHING will specify whether any
1522 operands were valueized. */
1524 static vec<vn_reference_op_s>
1525 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1527 if (!ref)
1528 return vNULL;
1529 shared_lookup_references.truncate (0);
1530 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1531 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1532 valueized_anything);
1533 return shared_lookup_references;
1536 /* Create a vector of vn_reference_op_s structures from CALL, a
1537 call statement. The vector is shared among all callers of
1538 this function. */
1540 static vec<vn_reference_op_s>
1541 valueize_shared_reference_ops_from_call (gcall *call)
1543 if (!call)
1544 return vNULL;
1545 shared_lookup_references.truncate (0);
1546 copy_reference_ops_from_call (call, &shared_lookup_references);
1547 shared_lookup_references = valueize_refs (shared_lookup_references);
1548 return shared_lookup_references;
1551 /* Lookup a SCCVN reference operation VR in the current hash table.
1552 Returns the resulting value number if it exists in the hash table,
1553 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1554 vn_reference_t stored in the hashtable if something is found. */
1556 static tree
1557 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1559 vn_reference_s **slot;
1560 hashval_t hash;
1562 hash = vr->hashcode;
1563 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1564 if (!slot && current_info == optimistic_info)
1565 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1566 if (slot)
1568 if (vnresult)
1569 *vnresult = (vn_reference_t)*slot;
1570 return ((vn_reference_t)*slot)->result;
1573 return NULL_TREE;
1576 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1577 with the current VUSE and performs the expression lookup. */
1579 static void *
1580 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1581 unsigned int cnt, void *vr_)
1583 vn_reference_t vr = (vn_reference_t)vr_;
1584 vn_reference_s **slot;
1585 hashval_t hash;
1587 /* This bounds the stmt walks we perform on reference lookups
1588 to O(1) instead of O(N) where N is the number of dominating
1589 stores. */
1590 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1591 return (void *)-1;
1593 if (last_vuse_ptr)
1594 *last_vuse_ptr = vuse;
1596 /* Fixup vuse and hash. */
1597 if (vr->vuse)
1598 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1599 vr->vuse = vuse_ssa_val (vuse);
1600 if (vr->vuse)
1601 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1603 hash = vr->hashcode;
1604 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1605 if (!slot && current_info == optimistic_info)
1606 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1607 if (slot)
1608 return *slot;
1610 return NULL;
1613 /* Lookup an existing or insert a new vn_reference entry into the
1614 value table for the VUSE, SET, TYPE, OPERANDS reference that
1615 has the value VALUE, which is either a constant or an SSA name. */
1617 static vn_reference_t
1618 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1619 alias_set_type set,
1620 tree type,
1621 vec<vn_reference_op_s,
1622 va_heap> operands,
1623 tree value)
1625 vn_reference_s vr1;
1626 vn_reference_t result;
1627 unsigned value_id;
1628 vr1.vuse = vuse;
1629 vr1.operands = operands;
1630 vr1.type = type;
1631 vr1.set = set;
1632 vr1.hashcode = vn_reference_compute_hash (&vr1);
1633 if (vn_reference_lookup_1 (&vr1, &result))
1634 return result;
1635 if (TREE_CODE (value) == SSA_NAME)
1636 value_id = VN_INFO (value)->value_id;
1637 else
1638 value_id = get_or_alloc_constant_value_id (value);
1639 return vn_reference_insert_pieces (vuse, set, type,
1640 operands.copy (), value, value_id);
1643 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1644 static unsigned mprts_hook_cnt;
1646 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1648 static tree
1649 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1651 if (!rcode.is_tree_code ())
1652 return NULL_TREE;
1653 vn_nary_op_t vnresult = NULL;
1654 tree res = vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1655 (tree_code) rcode, type, ops, &vnresult);
1656 /* We can end up endlessly recursing simplifications if the lookup above
1657 presents us with a def-use chain that mirrors the original simplification.
1658 See PR80887 for an example. Limit successful lookup artificially
1659 to 10 times if we are called as mprts_hook. */
1660 if (res
1661 && mprts_hook
1662 && --mprts_hook_cnt == 0)
1664 if (dump_file && (dump_flags & TDF_DETAILS))
1665 fprintf (dump_file, "Resetting mprts_hook after too many "
1666 "invocations.\n");
1667 mprts_hook = NULL;
1669 return res;
1672 /* Return a value-number for RCODE OPS... either by looking up an existing
1673 value-number for the simplified result or by inserting the operation if
1674 INSERT is true. */
1676 static tree
1677 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1678 bool insert)
1680 tree result = NULL_TREE;
1681 /* We will be creating a value number for
1682 RCODE (OPS...).
1683 So first simplify and lookup this expression to see if it
1684 is already available. */
1685 mprts_hook = vn_lookup_simplify_result;
1686 mprts_hook_cnt = 9;
1687 bool res = false;
1688 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1690 case 1:
1691 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1692 break;
1693 case 2:
1694 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1695 break;
1696 case 3:
1697 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1698 break;
1700 mprts_hook = NULL;
1701 gimple *new_stmt = NULL;
1702 if (res
1703 && gimple_simplified_result_is_gimple_val (rcode, ops))
1704 /* The expression is already available. */
1705 result = ops[0];
1706 else
1708 tree val = vn_lookup_simplify_result (rcode, type, ops);
1709 if (!val && insert)
1711 gimple_seq stmts = NULL;
1712 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1713 if (result)
1715 gcc_assert (gimple_seq_singleton_p (stmts));
1716 new_stmt = gimple_seq_first_stmt (stmts);
1719 else
1720 /* The expression is already available. */
1721 result = val;
1723 if (new_stmt)
1725 /* The expression is not yet available, value-number lhs to
1726 the new SSA_NAME we created. */
1727 /* Initialize value-number information properly. */
1728 VN_INFO_GET (result)->valnum = result;
1729 VN_INFO (result)->value_id = get_next_value_id ();
1730 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1731 new_stmt);
1732 VN_INFO (result)->needs_insertion = true;
1733 /* ??? PRE phi-translation inserts NARYs without corresponding
1734 SSA name result. Re-use those but set their result according
1735 to the stmt we just built. */
1736 vn_nary_op_t nary = NULL;
1737 vn_nary_op_lookup_stmt (new_stmt, &nary);
1738 if (nary)
1740 gcc_assert (nary->result == NULL_TREE);
1741 nary->result = gimple_assign_lhs (new_stmt);
1743 /* As all "inserted" statements are singleton SCCs, insert
1744 to the valid table. This is strictly needed to
1745 avoid re-generating new value SSA_NAMEs for the same
1746 expression during SCC iteration over and over (the
1747 optimistic table gets cleared after each iteration).
1748 We do not need to insert into the optimistic table, as
1749 lookups there will fall back to the valid table. */
1750 else if (current_info == optimistic_info)
1752 current_info = valid_info;
1753 vn_nary_op_insert_stmt (new_stmt, result);
1754 current_info = optimistic_info;
1756 else
1757 vn_nary_op_insert_stmt (new_stmt, result);
1758 if (dump_file && (dump_flags & TDF_DETAILS))
1760 fprintf (dump_file, "Inserting name ");
1761 print_generic_expr (dump_file, result);
1762 fprintf (dump_file, " for expression ");
1763 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1764 fprintf (dump_file, "\n");
1767 return result;
1770 /* Return a value-number for RCODE OPS... either by looking up an existing
1771 value-number for the simplified result or by inserting the operation. */
1773 static tree
1774 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1776 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
779 /* Try to simplify the n-ary operation NARY and return the value
780 number of the simplified result, if present. */
1782 tree
1783 vn_nary_simplify (vn_nary_op_t nary)
1785 if (nary->length > 3)
1786 return NULL_TREE;
1787 tree ops[3];
1788 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1789 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
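/* For illustration: when case 4) of vn_reference_lookup_3 below asks
   for BIT_FIELD_REF <val_1, 32, 0>, vn_nary_build_or_lookup first
   lets the match-and-simplify machinery fold the expression (it may
   collapse to an existing SSA name or constant), then looks it up in
   the nary tables, and only if both fail materializes a fresh
   SSA name for it, marked needs_insertion so that elimination can
   emit the defining statement later.  */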
1793 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1794 from the statement defining VUSE and if not successful tries to
1795 translate *REFP and VR_ through an aggregate copy at the definition
1796 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1797 of *REF and *VR. If only disambiguation was performed then
1798 *DISAMBIGUATE_ONLY is set to true. */
1800 static void *
1801 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1802 bool *disambiguate_only)
1804 vn_reference_t vr = (vn_reference_t)vr_;
1805 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1806 tree base = ao_ref_base (ref);
1807 HOST_WIDE_INT offset, maxsize;
1808 static vec<vn_reference_op_s> lhs_ops;
1809 ao_ref lhs_ref;
1810 bool lhs_ref_ok = false;
1812 /* If the reference is based on a parameter that was determined to be
1813 pointing to readonly memory, it doesn't change. */
1814 if (TREE_CODE (base) == MEM_REF
1815 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1816 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1817 && bitmap_bit_p (const_parms,
1818 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1820 *disambiguate_only = true;
1821 return NULL;
1824 /* First try to disambiguate after value-replacing in the definition's LHS. */
1825 if (is_gimple_assign (def_stmt))
1827 tree lhs = gimple_assign_lhs (def_stmt);
1828 bool valueized_anything = false;
1829 /* Avoid re-allocation overhead. */
1830 lhs_ops.truncate (0);
1831 copy_reference_ops_from_ref (lhs, &lhs_ops);
1832 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1833 if (valueized_anything)
1835 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1836 get_alias_set (lhs),
1837 TREE_TYPE (lhs), lhs_ops);
1838 if (lhs_ref_ok
1839 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1841 *disambiguate_only = true;
1842 return NULL;
1845 else
1847 ao_ref_init (&lhs_ref, lhs);
1848 lhs_ref_ok = true;
1851 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1852 && gimple_call_num_args (def_stmt) <= 4)
1854 /* For builtin calls, valueize their arguments and call the
1855 alias oracle again. Valueization may improve points-to
1856 info of pointers and constify size and position arguments.
1857 Originally this was motivated by PR61034 which has
1858 conditional calls to free falsely clobbering ref because
1859 of imprecise points-to info of the argument. */
1860 tree oldargs[4];
1861 bool valueized_anything = false;
1862 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1864 oldargs[i] = gimple_call_arg (def_stmt, i);
1865 if (TREE_CODE (oldargs[i]) == SSA_NAME
1866 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1868 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1869 valueized_anything = true;
1872 if (valueized_anything)
1874 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1875 ref);
1876 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1877 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1878 if (!res)
1880 *disambiguate_only = true;
1881 return NULL;
1886 if (*disambiguate_only)
1887 return (void *)-1;
1889 offset = ref->offset;
1890 maxsize = ref->max_size;
1892 /* If we cannot constrain the size of the reference we cannot
1893 test if anything kills it. */
1894 if (maxsize == -1)
1895 return (void *)-1;
1897 /* We can't deduce anything useful from clobbers. */
1898 if (gimple_clobber_p (def_stmt))
1899 return (void *)-1;
1901 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1902 from that definition.
1903 1) Memset. */
1904 if (is_gimple_reg_type (vr->type)
1905 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1906 && integer_zerop (gimple_call_arg (def_stmt, 1))
1907 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1908 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1910 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1911 tree base2;
1912 HOST_WIDE_INT offset2, size2, maxsize2;
1913 bool reverse;
1914 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1915 &reverse);
1916 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1917 if ((unsigned HOST_WIDE_INT)size2 / 8
1918 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1919 && maxsize2 != -1
1920 && operand_equal_p (base, base2, 0)
1921 && offset2 <= offset
1922 && offset2 + size2 >= offset + maxsize)
1924 tree val = build_zero_cst (vr->type);
1925 return vn_reference_lookup_or_insert_for_pieces
1926 (vuse, vr->set, vr->type, vr->operands, val);
1930 /* 2) Assignment from an empty CONSTRUCTOR. */
1931 else if (is_gimple_reg_type (vr->type)
1932 && gimple_assign_single_p (def_stmt)
1933 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1934 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1936 tree base2;
1937 HOST_WIDE_INT offset2, size2, maxsize2;
1938 bool reverse;
1939 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1940 &offset2, &size2, &maxsize2, &reverse);
1941 if (maxsize2 != -1
1942 && operand_equal_p (base, base2, 0)
1943 && offset2 <= offset
1944 && offset2 + size2 >= offset + maxsize)
1946 tree val = build_zero_cst (vr->type);
1947 return vn_reference_lookup_or_insert_for_pieces
1948 (vuse, vr->set, vr->type, vr->operands, val);
1952 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1953 routines to extract the assigned bits. */
1954 else if (ref->size == maxsize
1955 && is_gimple_reg_type (vr->type)
1956 && !contains_storage_order_barrier_p (vr->operands)
1957 && gimple_assign_single_p (def_stmt)
1958 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1959 && maxsize % BITS_PER_UNIT == 0
1960 && offset % BITS_PER_UNIT == 0
1961 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1962 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1963 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1965 tree base2;
1966 HOST_WIDE_INT offset2, size2, maxsize2;
1967 bool reverse;
1968 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1969 &offset2, &size2, &maxsize2, &reverse);
1970 if (!reverse
1971 && maxsize2 != -1
1972 && maxsize2 == size2
1973 && size2 % BITS_PER_UNIT == 0
1974 && offset2 % BITS_PER_UNIT == 0
1975 && operand_equal_p (base, base2, 0)
1976 && offset2 <= offset
1977 && offset2 + size2 >= offset + maxsize)
1979 /* We support up to 512-bit values (for V8DFmode). */
1980 unsigned char buffer[64];
1981 int len;
1983 tree rhs = gimple_assign_rhs1 (def_stmt);
1984 if (TREE_CODE (rhs) == SSA_NAME)
1985 rhs = SSA_VAL (rhs);
1986 len = native_encode_expr (rhs,
1987 buffer, sizeof (buffer));
1988 if (len > 0)
1990 tree type = vr->type;
1991 /* Make sure to interpret in a type that has a range
1992 covering the whole access size. */
1993 if (INTEGRAL_TYPE_P (vr->type)
1994 && ref->size != TYPE_PRECISION (vr->type))
1995 type = build_nonstandard_integer_type (ref->size,
1996 TYPE_UNSIGNED (type));
1997 tree val = native_interpret_expr (type,
1998 buffer
1999 + ((offset - offset2)
2000 / BITS_PER_UNIT),
2001 ref->size / BITS_PER_UNIT);
2002 /* If we chop off bits because the type's precision doesn't
2003 match the memory access size, this is ok when optimizing
2004 reads but not when called from the DSE code during
2005 elimination. */
2006 if (val
2007 && type != vr->type)
2009 if (! int_fits_type_p (val, vr->type))
2010 val = NULL_TREE;
2011 else
2012 val = fold_convert (vr->type, val);
2015 if (val)
2016 return vn_reference_lookup_or_insert_for_pieces
2017 (vuse, vr->set, vr->type, vr->operands, val);
2022 /* 4) Assignment from an SSA name whose definition we may be able
2023 to access pieces from. */
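/* An illustrative sketch (hypothetical GIMPLE, not from the original
   sources): after
     MEM[(struct S *)p_1] = val_2;
   a register-type load of a piece contained in that store can be
   value-numbered as BIT_FIELD_REF <val_2, size, position>.  */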
2024 else if (ref->size == maxsize
2025 && is_gimple_reg_type (vr->type)
2026 && !contains_storage_order_barrier_p (vr->operands)
2027 && gimple_assign_single_p (def_stmt)
2028 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2030 tree base2;
2031 HOST_WIDE_INT offset2, size2, maxsize2;
2032 bool reverse;
2033 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2034 &offset2, &size2, &maxsize2,
2035 &reverse);
2036 if (!reverse
2037 && maxsize2 != -1
2038 && maxsize2 == size2
2039 && operand_equal_p (base, base2, 0)
2040 && offset2 <= offset
2041 && offset2 + size2 >= offset + maxsize
2042 /* ??? We can't handle bitfield precision extracts without
2043 either using an alternate type for the BIT_FIELD_REF and
2044 then doing a conversion or possibly adjusting the offset
2045 according to endianness. */
2046 && (! INTEGRAL_TYPE_P (vr->type)
2047 || ref->size == TYPE_PRECISION (vr->type))
2048 && ref->size % BITS_PER_UNIT == 0)
2050 code_helper rcode = BIT_FIELD_REF;
2051 tree ops[3];
2052 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2053 ops[1] = bitsize_int (ref->size);
2054 ops[2] = bitsize_int (offset - offset2);
2055 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2056 if (val
2057 && (TREE_CODE (val) != SSA_NAME
2058 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2060 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2061 (vuse, vr->set, vr->type, vr->operands, val);
2062 return res;
2067 /* 5) For aggregate copies translate the reference through them if
2068 the copy kills ref. */
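/* An illustrative sketch (hypothetical GIMPLE, not from the original
   sources): for
     b = a;
     ... = b.x;
   the reference b.x is rewritten in terms of the copy source and the
   walk continues as a lookup of a.x before the copy.  */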
2069 else if (vn_walk_kind == VN_WALKREWRITE
2070 && gimple_assign_single_p (def_stmt)
2071 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2072 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2073 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2075 tree base2;
2076 HOST_WIDE_INT maxsize2;
2077 int i, j, k;
2078 auto_vec<vn_reference_op_s> rhs;
2079 vn_reference_op_t vro;
2080 ao_ref r;
2082 if (!lhs_ref_ok)
2083 return (void *)-1;
2085 /* See if the assignment kills REF. */
2086 base2 = ao_ref_base (&lhs_ref);
2087 maxsize2 = lhs_ref.max_size;
2088 if (maxsize2 == -1
2089 || (base != base2
2090 && (TREE_CODE (base) != MEM_REF
2091 || TREE_CODE (base2) != MEM_REF
2092 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2093 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2094 TREE_OPERAND (base2, 1))))
2095 || !stmt_kills_ref_p (def_stmt, ref))
2096 return (void *)-1;
2098 /* Find the common base of ref and the lhs. lhs_ops already
2099 contains valueized operands for the lhs. */
2100 i = vr->operands.length () - 1;
2101 j = lhs_ops.length () - 1;
2102 while (j >= 0 && i >= 0
2103 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2105 i--;
2106 j--;
2109 /* ??? The innermost op should always be a MEM_REF and we already
2110 checked that the assignment to the lhs kills vr. Thus for
2111 aggregate copies using char[] types the vn_reference_op_eq
2112 may fail when comparing types for compatibility. But we really
2113 don't care here - further lookups with the rewritten operands
2114 will simply fail if we messed up types too badly. */
2115 HOST_WIDE_INT extra_off = 0;
2116 if (j == 0 && i >= 0
2117 && lhs_ops[0].opcode == MEM_REF
2118 && lhs_ops[0].off != -1)
2120 if (lhs_ops[0].off == vr->operands[i].off)
2121 i--, j--;
2122 else if (vr->operands[i].opcode == MEM_REF
2123 && vr->operands[i].off != -1)
2125 extra_off = vr->operands[i].off - lhs_ops[0].off;
2126 i--, j--;
2130 /* i now points to the first additional op.
2131 ??? LHS may not be completely contained in VR; one or more
2132 VIEW_CONVERT_EXPRs could be in its way. We could at least
2133 try handling outermost VIEW_CONVERT_EXPRs. */
2134 if (j != -1)
2135 return (void *)-1;
2137 /* Punt if the additional ops contain a storage order barrier. */
2138 for (k = i; k >= 0; k--)
2140 vro = &vr->operands[k];
2141 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2142 return (void *)-1;
2145 /* Now re-write REF to be based on the rhs of the assignment. */
2146 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2148 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2149 if (extra_off != 0)
2151 if (rhs.length () < 2
2152 || rhs[0].opcode != MEM_REF
2153 || rhs[0].off == -1)
2154 return (void *)-1;
2155 rhs[0].off += extra_off;
2156 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2157 build_int_cst (TREE_TYPE (rhs[0].op0),
2158 extra_off));
2161 /* We need to pre-pend vr->operands[0..i] to rhs. */
2162 vec<vn_reference_op_s> old = vr->operands;
2163 if (i + 1 + rhs.length () > vr->operands.length ())
2164 vr->operands.safe_grow (i + 1 + rhs.length ());
2165 else
2166 vr->operands.truncate (i + 1 + rhs.length ());
2167 FOR_EACH_VEC_ELT (rhs, j, vro)
2168 vr->operands[i + 1 + j] = *vro;
2169 vr->operands = valueize_refs (vr->operands);
2170 if (old == shared_lookup_references)
2171 shared_lookup_references = vr->operands;
2172 vr->hashcode = vn_reference_compute_hash (vr);
2174 /* Try folding the new reference to a constant. */
2175 tree val = fully_constant_vn_reference_p (vr);
2176 if (val)
2177 return vn_reference_lookup_or_insert_for_pieces
2178 (vuse, vr->set, vr->type, vr->operands, val);
2180 /* Adjust *ref from the new operands. */
2181 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2182 return (void *)-1;
2183 /* This can happen with bitfields. */
2184 if (ref->size != r.size)
2185 return (void *)-1;
2186 *ref = r;
2188 /* Do not update last seen VUSE after translating. */
2189 last_vuse_ptr = NULL;
2191 /* Keep looking for the adjusted *REF / VR pair. */
2192 return NULL;
2195 /* 6) For memcpy copies translate the reference through them if
2196 the copy kills ref. */
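/* An illustrative sketch (hypothetical code, not from the original
   sources): for
     memcpy (&b, &a, sizeof (b));
     ... = b.x;
   when the access is fully contained in the copied region, the reference
   is rewritten to read from a and the walk continues.  */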
2197 else if (vn_walk_kind == VN_WALKREWRITE
2198 && is_gimple_reg_type (vr->type)
2199 /* ??? Handle BCOPY as well. */
2200 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2201 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2202 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2203 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2204 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2205 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2206 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2207 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2209 tree lhs, rhs;
2210 ao_ref r;
2211 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2212 vn_reference_op_s op;
2213 HOST_WIDE_INT at;
2215 /* Only handle non-variable, addressable refs. */
2216 if (ref->size != maxsize
2217 || offset % BITS_PER_UNIT != 0
2218 || ref->size % BITS_PER_UNIT != 0)
2219 return (void *)-1;
2221 /* Extract a pointer base and an offset for the destination. */
2222 lhs = gimple_call_arg (def_stmt, 0);
2223 lhs_offset = 0;
2224 if (TREE_CODE (lhs) == SSA_NAME)
2226 lhs = SSA_VAL (lhs);
2227 if (TREE_CODE (lhs) == SSA_NAME)
2229 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2230 if (gimple_assign_single_p (def_stmt)
2231 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2232 lhs = gimple_assign_rhs1 (def_stmt);
2235 if (TREE_CODE (lhs) == ADDR_EXPR)
2237 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2238 &lhs_offset);
2239 if (!tem)
2240 return (void *)-1;
2241 if (TREE_CODE (tem) == MEM_REF
2242 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2244 lhs = TREE_OPERAND (tem, 0);
2245 if (TREE_CODE (lhs) == SSA_NAME)
2246 lhs = SSA_VAL (lhs);
2247 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2249 else if (DECL_P (tem))
2250 lhs = build_fold_addr_expr (tem);
2251 else
2252 return (void *)-1;
2254 if (TREE_CODE (lhs) != SSA_NAME
2255 && TREE_CODE (lhs) != ADDR_EXPR)
2256 return (void *)-1;
2258 /* Extract a pointer base and an offset for the source. */
2259 rhs = gimple_call_arg (def_stmt, 1);
2260 rhs_offset = 0;
2261 if (TREE_CODE (rhs) == SSA_NAME)
2262 rhs = SSA_VAL (rhs);
2263 if (TREE_CODE (rhs) == ADDR_EXPR)
2265 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2266 &rhs_offset);
2267 if (!tem)
2268 return (void *)-1;
2269 if (TREE_CODE (tem) == MEM_REF
2270 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2272 rhs = TREE_OPERAND (tem, 0);
2273 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2275 else if (DECL_P (tem))
2276 rhs = build_fold_addr_expr (tem);
2277 else
2278 return (void *)-1;
2280 if (TREE_CODE (rhs) != SSA_NAME
2281 && TREE_CODE (rhs) != ADDR_EXPR)
2282 return (void *)-1;
2284 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2286 /* The bases of the destination and the reference have to agree. */
2287 if ((TREE_CODE (base) != MEM_REF
2288 && !DECL_P (base))
2289 || (TREE_CODE (base) == MEM_REF
2290 && (TREE_OPERAND (base, 0) != lhs
2291 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2292 || (DECL_P (base)
2293 && (TREE_CODE (lhs) != ADDR_EXPR
2294 || TREE_OPERAND (lhs, 0) != base)))
2295 return (void *)-1;
2297 at = offset / BITS_PER_UNIT;
2298 if (TREE_CODE (base) == MEM_REF)
2299 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2300 /* If the access is completely outside of the memcpy destination
2301 area there is no aliasing. */
2302 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2303 || lhs_offset + copy_size <= at)
2304 return NULL;
2305 /* And the access has to be contained within the memcpy destination. */
2306 if (lhs_offset > at
2307 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2308 return (void *)-1;
2310 /* Make room for 2 operands in the new reference. */
2311 if (vr->operands.length () < 2)
2313 vec<vn_reference_op_s> old = vr->operands;
2314 vr->operands.safe_grow_cleared (2);
2315 if (old == shared_lookup_references)
2316 shared_lookup_references = vr->operands;
2318 else
2319 vr->operands.truncate (2);
2321 /* The looked-through reference is a simple MEM_REF. */
2322 memset (&op, 0, sizeof (op));
2323 op.type = vr->type;
2324 op.opcode = MEM_REF;
2325 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2326 op.off = at - lhs_offset + rhs_offset;
2327 vr->operands[0] = op;
2328 op.type = TREE_TYPE (rhs);
2329 op.opcode = TREE_CODE (rhs);
2330 op.op0 = rhs;
2331 op.off = -1;
2332 vr->operands[1] = op;
2333 vr->hashcode = vn_reference_compute_hash (vr);
2335 /* Try folding the new reference to a constant. */
2336 tree val = fully_constant_vn_reference_p (vr);
2337 if (val)
2338 return vn_reference_lookup_or_insert_for_pieces
2339 (vuse, vr->set, vr->type, vr->operands, val);
2341 /* Adjust *ref from the new operands. */
2342 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2343 return (void *)-1;
2344 /* This can happen with bitfields. */
2345 if (ref->size != r.size)
2346 return (void *)-1;
2347 *ref = r;
2349 /* Do not update last seen VUSE after translating. */
2350 last_vuse_ptr = NULL;
2352 /* Keep looking for the adjusted *REF / VR pair. */
2353 return NULL;
2356 /* Bail out and stop walking. */
2357 return (void *)-1;
2360 /* Return a reference op vector from OP that can be used for
2361 vn_reference_lookup_pieces. The caller is responsible for releasing
2362 the vector. */
2364 vec<vn_reference_op_s>
2365 vn_reference_operands_for_lookup (tree op)
2367 bool valueized;
2368 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2371 /* Lookup a reference operation by its parts in the current hash table.
2372 Returns the resulting value number if it exists in the hash table,
2373 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2374 vn_reference_t stored in the hashtable if something is found. */
2376 tree
2377 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2378 vec<vn_reference_op_s> operands,
2379 vn_reference_t *vnresult, vn_lookup_kind kind)
2381 struct vn_reference_s vr1;
2382 vn_reference_t tmp;
2383 tree cst;
2385 if (!vnresult)
2386 vnresult = &tmp;
2387 *vnresult = NULL;
2389 vr1.vuse = vuse_ssa_val (vuse);
2390 shared_lookup_references.truncate (0);
2391 shared_lookup_references.safe_grow (operands.length ());
2392 memcpy (shared_lookup_references.address (),
2393 operands.address (),
2394 sizeof (vn_reference_op_s)
2395 * operands.length ());
2396 vr1.operands = operands = shared_lookup_references
2397 = valueize_refs (shared_lookup_references);
2398 vr1.type = type;
2399 vr1.set = set;
2400 vr1.hashcode = vn_reference_compute_hash (&vr1);
2401 if ((cst = fully_constant_vn_reference_p (&vr1)))
2402 return cst;
2404 vn_reference_lookup_1 (&vr1, vnresult);
2405 if (!*vnresult
2406 && kind != VN_NOWALK
2407 && vr1.vuse)
2409 ao_ref r;
2410 vn_walk_kind = kind;
2411 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2412 *vnresult =
2413 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2414 vn_reference_lookup_2,
2415 vn_reference_lookup_3,
2416 vuse_ssa_val, &vr1);
2417 gcc_checking_assert (vr1.operands == shared_lookup_references);
2420 if (*vnresult)
2421 return (*vnresult)->result;
2423 return NULL_TREE;
2426 /* Lookup OP in the current hash table, and return the resulting value
2427 number if it exists in the hash table. Return NULL_TREE if it does
2428 not exist in the hash table or if the result field of the structure
2429 was NULL. VNRESULT will be filled in with the vn_reference_t
2430 stored in the hashtable if one exists. When TBAA_P is false assume
2431 we are looking up a store and treat it as having alias-set zero. */
2433 tree
2434 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2435 vn_reference_t *vnresult, bool tbaa_p)
2437 vec<vn_reference_op_s> operands;
2438 struct vn_reference_s vr1;
2439 tree cst;
2440 bool valueized_anything;
2442 if (vnresult)
2443 *vnresult = NULL;
2445 vr1.vuse = vuse_ssa_val (vuse);
2446 vr1.operands = operands
2447 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2448 vr1.type = TREE_TYPE (op);
2449 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2450 vr1.hashcode = vn_reference_compute_hash (&vr1);
2451 if ((cst = fully_constant_vn_reference_p (&vr1)))
2452 return cst;
2454 if (kind != VN_NOWALK
2455 && vr1.vuse)
2457 vn_reference_t wvnresult;
2458 ao_ref r;
2459 /* Make sure to use a valueized reference if we valueized anything.
2460 Otherwise preserve the full reference for advanced TBAA. */
2461 if (!valueized_anything
2462 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2463 vr1.operands))
2464 ao_ref_init (&r, op);
2465 if (! tbaa_p)
2466 r.ref_alias_set = r.base_alias_set = 0;
2467 vn_walk_kind = kind;
2468 wvnresult =
2469 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2470 vn_reference_lookup_2,
2471 vn_reference_lookup_3,
2472 vuse_ssa_val, &vr1);
2473 gcc_checking_assert (vr1.operands == shared_lookup_references);
2474 if (wvnresult)
2476 if (vnresult)
2477 *vnresult = wvnresult;
2478 return wvnresult->result;
2481 return NULL_TREE;
2484 return vn_reference_lookup_1 (&vr1, vnresult);
2487 /* Lookup CALL in the current hash table and return the entry in
2488 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2490 void
2491 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2492 vn_reference_t vr)
2494 if (vnresult)
2495 *vnresult = NULL;
2497 tree vuse = gimple_vuse (call);
2499 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2500 vr->operands = valueize_shared_reference_ops_from_call (call);
2501 vr->type = gimple_expr_type (call);
2502 vr->set = 0;
2503 vr->hashcode = vn_reference_compute_hash (vr);
2504 vn_reference_lookup_1 (vr, vnresult);
2507 /* Insert OP into the current hash table with a value number of
2508 RESULT, and return the resulting reference structure we created. */
2510 static vn_reference_t
2511 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2513 vn_reference_s **slot;
2514 vn_reference_t vr1;
2515 bool tem;
2517 vr1 = current_info->references_pool->allocate ();
2518 if (TREE_CODE (result) == SSA_NAME)
2519 vr1->value_id = VN_INFO (result)->value_id;
2520 else
2521 vr1->value_id = get_or_alloc_constant_value_id (result);
2522 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2523 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2524 vr1->type = TREE_TYPE (op);
2525 vr1->set = get_alias_set (op);
2526 vr1->hashcode = vn_reference_compute_hash (vr1);
2527 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2528 vr1->result_vdef = vdef;
2530 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2531 INSERT);
2533 /* Because we look up stores using vuses, and value number failures
2534 using the vdefs (see visit_reference_op_store for how and why),
2535 it's possible that on failure we may try to insert an already
2536 inserted store. This is not wrong; there is no ssa name for a
2537 store that we could use as a differentiator anyway. Thus, unlike
2538 the other lookup functions, you cannot gcc_assert (!*slot)
2539 here. */
2541 /* But free the old slot in case of a collision. */
2542 if (*slot)
2543 free_reference (*slot);
2545 *slot = vr1;
2546 return vr1;
2549 /* Insert a reference by its pieces into the current hash table with
2550 a value number of RESULT. Return the resulting reference
2551 structure we created. */
2553 vn_reference_t
2554 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2555 vec<vn_reference_op_s> operands,
2556 tree result, unsigned int value_id)
2559 vn_reference_s **slot;
2560 vn_reference_t vr1;
2562 vr1 = current_info->references_pool->allocate ();
2563 vr1->value_id = value_id;
2564 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2565 vr1->operands = valueize_refs (operands);
2566 vr1->type = type;
2567 vr1->set = set;
2568 vr1->hashcode = vn_reference_compute_hash (vr1);
2569 if (result && TREE_CODE (result) == SSA_NAME)
2570 result = SSA_VAL (result);
2571 vr1->result = result;
2573 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2574 INSERT);
2576 /* At this point we should have all the things inserted that we have
2577 seen before, and we should never try inserting something that
2578 already exists. */
2579 gcc_assert (!*slot);
2580 if (*slot)
2581 free_reference (*slot);
2583 *slot = vr1;
2584 return vr1;
2587 /* Compute and return the hash value for nary operation VNO1. */
2589 static hashval_t
2590 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2592 inchash::hash hstate;
2593 unsigned i;
2595 for (i = 0; i < vno1->length; ++i)
2596 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2597 vno1->op[i] = SSA_VAL (vno1->op[i]);
2599 if (((vno1->length == 2
2600 && commutative_tree_code (vno1->opcode))
2601 || (vno1->length == 3
2602 && commutative_ternary_tree_code (vno1->opcode)))
2603 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2604 std::swap (vno1->op[0], vno1->op[1]);
2605 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2606 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2608 std::swap (vno1->op[0], vno1->op[1]);
2609 vno1->opcode = swap_tree_comparison (vno1->opcode);
2612 hstate.add_int (vno1->opcode);
2613 for (i = 0; i < vno1->length; ++i)
2614 inchash::add_expr (vno1->op[i], hstate);
2616 return hstate.end ();
2619 /* Compare nary operations VNO1 and VNO2 and return true if they are
2620 equivalent. */
2622 bool
2623 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2625 unsigned i;
2627 if (vno1->hashcode != vno2->hashcode)
2628 return false;
2630 if (vno1->length != vno2->length)
2631 return false;
2633 if (vno1->opcode != vno2->opcode
2634 || !types_compatible_p (vno1->type, vno2->type))
2635 return false;
2637 for (i = 0; i < vno1->length; ++i)
2638 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2639 return false;
2641 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2642 of op1. Need to check to make sure they are the same. */
2643 if (vno1->opcode == BIT_INSERT_EXPR
2644 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2645 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2646 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2647 return false;
2649 return true;
2652 /* Initialize VNO from the pieces provided. */
2654 static void
2655 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2656 enum tree_code code, tree type, tree *ops)
2658 vno->opcode = code;
2659 vno->length = length;
2660 vno->type = type;
2661 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2664 /* Initialize VNO from OP. */
2666 static void
2667 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2669 unsigned i;
2671 vno->opcode = TREE_CODE (op);
2672 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2673 vno->type = TREE_TYPE (op);
2674 for (i = 0; i < vno->length; ++i)
2675 vno->op[i] = TREE_OPERAND (op, i);
2678 /* Return the number of operands for a vn_nary ops structure from STMT. */
2680 static unsigned int
2681 vn_nary_length_from_stmt (gimple *stmt)
2683 switch (gimple_assign_rhs_code (stmt))
2685 case REALPART_EXPR:
2686 case IMAGPART_EXPR:
2687 case VIEW_CONVERT_EXPR:
2688 return 1;
2690 case BIT_FIELD_REF:
2691 return 3;
2693 case CONSTRUCTOR:
2694 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2696 default:
2697 return gimple_num_ops (stmt) - 1;
2701 /* Initialize VNO from STMT. */
2703 static void
2704 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2706 unsigned i;
2708 vno->opcode = gimple_assign_rhs_code (stmt);
2709 vno->type = gimple_expr_type (stmt);
2710 switch (vno->opcode)
2712 case REALPART_EXPR:
2713 case IMAGPART_EXPR:
2714 case VIEW_CONVERT_EXPR:
2715 vno->length = 1;
2716 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2717 break;
2719 case BIT_FIELD_REF:
2720 vno->length = 3;
2721 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2722 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2723 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2724 break;
2726 case CONSTRUCTOR:
2727 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2728 for (i = 0; i < vno->length; ++i)
2729 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2730 break;
2732 default:
2733 gcc_checking_assert (!gimple_assign_single_p (stmt));
2734 vno->length = gimple_num_ops (stmt) - 1;
2735 for (i = 0; i < vno->length; ++i)
2736 vno->op[i] = gimple_op (stmt, i + 1);
2740 /* Compute the hashcode for VNO and look for it in the hash table;
2741 return the resulting value number if it exists in the hash table.
2742 Return NULL_TREE if it does not exist in the hash table or if the
2743 result field of the operation is NULL. VNRESULT will contain the
2744 vn_nary_op_t from the hashtable if it exists. */
2746 static tree
2747 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2749 vn_nary_op_s **slot;
2751 if (vnresult)
2752 *vnresult = NULL;
2754 vno->hashcode = vn_nary_op_compute_hash (vno);
2755 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2756 NO_INSERT);
2757 if (!slot && current_info == optimistic_info)
2758 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2759 NO_INSERT);
2760 if (!slot)
2761 return NULL_TREE;
2762 if (vnresult)
2763 *vnresult = *slot;
2764 return (*slot)->result;
2767 /* Lookup an n-ary operation by its pieces and return the resulting value
2768 number if it exists in the hash table. Return NULL_TREE if it does
2769 not exist in the hash table or if the result field of the operation
2770 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2771 if it exists. */
2773 tree
2774 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2775 tree type, tree *ops, vn_nary_op_t *vnresult)
2777 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2778 sizeof_vn_nary_op (length));
2779 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2780 return vn_nary_op_lookup_1 (vno1, vnresult);
2783 /* Lookup OP in the current hash table, and return the resulting value
2784 number if it exists in the hash table. Return NULL_TREE if it does
2785 not exist in the hash table or if the result field of the operation
2786 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2787 if it exists. */
2789 tree
2790 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2792 vn_nary_op_t vno1
2793 = XALLOCAVAR (struct vn_nary_op_s,
2794 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2795 init_vn_nary_op_from_op (vno1, op);
2796 return vn_nary_op_lookup_1 (vno1, vnresult);
2799 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2800 value number if it exists in the hash table. Return NULL_TREE if
2801 it does not exist in the hash table. VNRESULT will contain the
2802 vn_nary_op_t from the hashtable if it exists. */
2804 tree
2805 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2807 vn_nary_op_t vno1
2808 = XALLOCAVAR (struct vn_nary_op_s,
2809 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2810 init_vn_nary_op_from_stmt (vno1, stmt);
2811 return vn_nary_op_lookup_1 (vno1, vnresult);
2814 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2816 static vn_nary_op_t
2817 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2819 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2822 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2823 obstack. */
2825 static vn_nary_op_t
2826 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2828 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2829 &current_info->nary_obstack);
2831 vno1->value_id = value_id;
2832 vno1->length = length;
2833 vno1->result = result;
2835 return vno1;
2838 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2839 VNO->HASHCODE first. */
2841 static vn_nary_op_t
2842 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2843 bool compute_hash)
2845 vn_nary_op_s **slot;
2847 if (compute_hash)
2848 vno->hashcode = vn_nary_op_compute_hash (vno);
2850 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2851 /* While we do not want to insert things twice, it's awkward to
2852 avoid it in the case where visit_nary_op pattern-matches stuff
2853 and ends up simplifying the replacement to itself. We then
2854 get two inserts, one from visit_nary_op and one from
2855 vn_nary_build_or_lookup.
2856 So allow inserts with the same value number. */
2857 if (*slot && (*slot)->result == vno->result)
2858 return *slot;
2860 gcc_assert (!*slot);
2862 *slot = vno;
2863 return vno;
2866 /* Insert an n-ary operation into the current hash table using its
2867 pieces. Return the vn_nary_op_t structure we created and put in
2868 the hashtable. */
2870 vn_nary_op_t
2871 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2872 tree type, tree *ops,
2873 tree result, unsigned int value_id)
2875 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2876 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2877 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2880 /* Insert OP into the current hash table with a value number of
2881 RESULT. Return the vn_nary_op_t structure we created and put in
2882 the hashtable. */
2884 vn_nary_op_t
2885 vn_nary_op_insert (tree op, tree result)
2887 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2888 vn_nary_op_t vno1;
2890 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2891 init_vn_nary_op_from_op (vno1, op);
2892 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2895 /* Insert the rhs of STMT into the current hash table with a value number of
2896 RESULT. */
2898 static vn_nary_op_t
2899 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2901 vn_nary_op_t vno1
2902 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2903 result, VN_INFO (result)->value_id);
2904 init_vn_nary_op_from_stmt (vno1, stmt);
2905 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2908 /* Compute a hashcode for PHI operation VP1 and return it. */
2910 static inline hashval_t
2911 vn_phi_compute_hash (vn_phi_t vp1)
2913 inchash::hash hstate (vp1->phiargs.length () > 2
2914 ? vp1->block->index : vp1->phiargs.length ());
2915 tree phi1op;
2916 tree type;
2917 edge e;
2918 edge_iterator ei;
2920 /* If all PHI arguments are constants we need to distinguish
2921 the PHI node via its type. */
2922 type = vp1->type;
2923 hstate.merge_hash (vn_hash_type (type));
2925 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2927 /* Don't hash backedge values; they need to be handled as VN_TOP
2928 for optimistic value-numbering. */
2929 if (e->flags & EDGE_DFS_BACK)
2930 continue;
2932 phi1op = vp1->phiargs[e->dest_idx];
2933 if (phi1op == VN_TOP)
2934 continue;
2935 inchash::add_expr (phi1op, hstate);
2938 return hstate.end ();
2942 /* Return true if COND1 and COND2 represent the same condition, set
2943 *INVERTED_P if one needs to be inverted to make it the same as
2944 the other. */
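/* For instance (an illustrative sketch for integer operands, not from
   the original sources), a_1 < b_2 and b_2 > a_1 represent the same
   condition, while a_1 >= b_2 represents it with *INVERTED_P set.  */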
2946 static bool
2947 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2948 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2950 enum tree_code code1 = gimple_cond_code (cond1);
2951 enum tree_code code2 = gimple_cond_code (cond2);
2953 *inverted_p = false;
2954 if (code1 == code2)
2956 else if (code1 == swap_tree_comparison (code2))
2957 std::swap (lhs2, rhs2);
2958 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2959 *inverted_p = true;
2960 else if (code1 == invert_tree_comparison
2961 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2963 std::swap (lhs2, rhs2);
2964 *inverted_p = true;
2966 else
2967 return false;
2969 return ((expressions_equal_p (lhs1, lhs2)
2970 && expressions_equal_p (rhs1, rhs2))
2971 || (commutative_tree_code (code1)
2972 && expressions_equal_p (lhs1, rhs2)
2973 && expressions_equal_p (rhs1, lhs2)));
2976 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2978 static int
2979 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2981 if (vp1->hashcode != vp2->hashcode)
2982 return false;
2984 if (vp1->block != vp2->block)
2986 if (vp1->phiargs.length () != vp2->phiargs.length ())
2987 return false;
2989 switch (vp1->phiargs.length ())
2991 case 1:
2992 /* Single-arg PHIs are just copies. */
2993 break;
2995 case 2:
2997 /* Rule out backedges into the PHI. */
2998 if (vp1->block->loop_father->header == vp1->block
2999 || vp2->block->loop_father->header == vp2->block)
3000 return false;
3002 /* If the PHI nodes do not have compatible types
3003 they are not the same. */
3004 if (!types_compatible_p (vp1->type, vp2->type))
3005 return false;
3007 basic_block idom1
3008 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3009 basic_block idom2
3010 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3011 /* If the immediate dominators end in switch stmts, multiple
3012 values may end up in the same PHI arg via intermediate
3013 CFG merges. */
3014 if (EDGE_COUNT (idom1->succs) != 2
3015 || EDGE_COUNT (idom2->succs) != 2)
3016 return false;
3018 /* Verify the controlling stmt is the same. */
3019 gimple *last1 = last_stmt (idom1);
3020 gimple *last2 = last_stmt (idom2);
3021 if (gimple_code (last1) != GIMPLE_COND
3022 || gimple_code (last2) != GIMPLE_COND)
3023 return false;
3024 bool inverted_p;
3025 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
3026 vp1->cclhs, vp1->ccrhs,
3027 as_a <gcond *> (last2),
3028 vp2->cclhs, vp2->ccrhs,
3029 &inverted_p))
3030 return false;
3032 /* Get at true/false controlled edges into the PHI. */
3033 edge te1, te2, fe1, fe2;
3034 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3035 &te1, &fe1)
3036 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3037 &te2, &fe2))
3038 return false;
3040 /* Swap edges if the second condition is the inversion of the
3041 first. */
3042 if (inverted_p)
3043 std::swap (te2, fe2);
3045 /* ??? Handle VN_TOP specially. */
3046 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3047 vp2->phiargs[te2->dest_idx])
3048 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3049 vp2->phiargs[fe2->dest_idx]))
3050 return false;
3052 return true;
3055 default:
3056 return false;
3060 /* If the PHI nodes do not have compatible types
3061 they are not the same. */
3062 if (!types_compatible_p (vp1->type, vp2->type))
3063 return false;
3065 /* Any phi in the same block will have its arguments in the
3066 same edge order, because of how we store phi nodes. */
3067 int i;
3068 tree phi1op;
3069 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3071 tree phi2op = vp2->phiargs[i];
3072 if (phi1op == VN_TOP || phi2op == VN_TOP)
3073 continue;
3074 if (!expressions_equal_p (phi1op, phi2op))
3075 return false;
3078 return true;
3081 static vec<tree> shared_lookup_phiargs;
3083 /* Lookup PHI in the current hash table, and return the resulting
3084 value number if it exists in the hash table. Return NULL_TREE if
3085 it does not exist in the hash table. */
3087 static tree
3088 vn_phi_lookup (gimple *phi)
3090 vn_phi_s **slot;
3091 struct vn_phi_s vp1;
3092 edge e;
3093 edge_iterator ei;
3095 shared_lookup_phiargs.truncate (0);
3096 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3098 /* Canonicalize the SSA_NAMEs to their value number. */
3099 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3101 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3102 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3103 shared_lookup_phiargs[e->dest_idx] = def;
3105 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3106 vp1.phiargs = shared_lookup_phiargs;
3107 vp1.block = gimple_bb (phi);
3108 /* Extract values of the controlling condition. */
3109 vp1.cclhs = NULL_TREE;
3110 vp1.ccrhs = NULL_TREE;
3111 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3112 if (EDGE_COUNT (idom1->succs) == 2)
3113 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3115 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3116 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3118 vp1.hashcode = vn_phi_compute_hash (&vp1);
3119 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3120 NO_INSERT);
3121 if (!slot && current_info == optimistic_info)
3122 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3123 NO_INSERT);
3124 if (!slot)
3125 return NULL_TREE;
3126 return (*slot)->result;
3129 /* Insert PHI into the current hash table with a value number of
3130 RESULT. */
3132 static vn_phi_t
3133 vn_phi_insert (gimple *phi, tree result)
3135 vn_phi_s **slot;
3136 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3137 vec<tree> args = vNULL;
3138 edge e;
3139 edge_iterator ei;
3141 args.safe_grow (gimple_phi_num_args (phi));
3143 /* Canonicalize the SSA_NAMEs to their value number. */
3144 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3146 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3147 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3148 args[e->dest_idx] = def;
3150 vp1->value_id = VN_INFO (result)->value_id;
3151 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3152 vp1->phiargs = args;
3153 vp1->block = gimple_bb (phi);
3154 /* Extract values of the controlling condition. */
3155 vp1->cclhs = NULL_TREE;
3156 vp1->ccrhs = NULL_TREE;
3157 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3158 if (EDGE_COUNT (idom1->succs) == 2)
3159 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3161 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3162 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3164 vp1->result = result;
3165 vp1->hashcode = vn_phi_compute_hash (vp1);
3167 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3169 /* Because we iterate over phi operations more than once, it's
3170 possible the slot might already exist here, hence no assert. */
3171 *slot = vp1;
3172 return vp1;
3176 /* Print set of components in strongly connected component SCC to OUT. */
3178 static void
3179 print_scc (FILE *out, vec<tree> scc)
3181 tree var;
3182 unsigned int i;
3184 fprintf (out, "SCC consists of %u:", scc.length ());
3185 FOR_EACH_VEC_ELT (scc, i, var)
3187 fprintf (out, " ");
3188 print_generic_expr (out, var);
3190 fprintf (out, "\n");
3193 /* Return true if BB1 is dominated by BB2 taking into account edges
3194 that are not executable. */
3196 static bool
3197 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3199 edge_iterator ei;
3200 edge e;
3202 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3203 return true;
3205 /* Before iterating we'd like to know if there exists an
3206 (executable) path from bb2 to bb1 at all; if not we can
3207 directly return false. For now simply iterate once. */
3209 /* Iterate to the single executable bb1 predecessor. */
3210 if (EDGE_COUNT (bb1->preds) > 1)
3212 edge prede = NULL;
3213 FOR_EACH_EDGE (e, ei, bb1->preds)
3214 if (e->flags & EDGE_EXECUTABLE)
3216 if (prede)
3218 prede = NULL;
3219 break;
3221 prede = e;
3223 if (prede)
3225 bb1 = prede->src;
3227 /* Re-do the dominance check with changed bb1. */
3228 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3229 return true;
3233 /* Iterate to the single executable bb2 successor. */
3234 edge succe = NULL;
3235 FOR_EACH_EDGE (e, ei, bb2->succs)
3236 if (e->flags & EDGE_EXECUTABLE)
3238 if (succe)
3240 succe = NULL;
3241 break;
3243 succe = e;
3245 if (succe)
3247 /* Verify the reached block is only reached through succe.
3248 If there is only one edge we can spare us the dominator
3249 check and iterate directly. */
3250 if (EDGE_COUNT (succe->dest->preds) > 1)
3252 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3253 if (e != succe
3254 && (e->flags & EDGE_EXECUTABLE))
3256 succe = NULL;
3257 break;
3260 if (succe)
3262 bb2 = succe->dest;
3264 /* Re-do the dominance check with changed bb2. */
3265 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3266 return true;
3270 /* We could now iterate updating bb1 / bb2. */
3271 return false;
3274 /* Set the value number of FROM to TO, return true if it has changed
3275 as a result. */
3277 static inline bool
3278 set_ssa_val_to (tree from, tree to)
3280 tree currval = SSA_VAL (from);
3281 HOST_WIDE_INT toff, coff;
3283 /* The only things we allow as value numbers are ssa_names
3284 and invariants. So assert that here. We don't allow VN_TOP
3285 as visiting a stmt should produce a value-number other than
3286 that.
3287 ??? Still VN_TOP can happen for unreachable code, so force
3288 it to varying in that case. Not all code is prepared to
3289 get VN_TOP on valueization. */
3290 if (to == VN_TOP)
3292 if (dump_file && (dump_flags & TDF_DETAILS))
3293 fprintf (dump_file, "Forcing value number to varying on "
3294 "receiving VN_TOP\n");
3295 to = from;
3298 gcc_assert (to != NULL_TREE
3299 && ((TREE_CODE (to) == SSA_NAME
3300 && (to == from || SSA_VAL (to) == to))
3301 || is_gimple_min_invariant (to)));
3303 if (from != to)
3305 if (currval == from)
3307 if (dump_file && (dump_flags & TDF_DETAILS))
3309 fprintf (dump_file, "Not changing value number of ");
3310 print_generic_expr (dump_file, from);
3311 fprintf (dump_file, " from VARYING to ");
3312 print_generic_expr (dump_file, to);
3313 fprintf (dump_file, "\n");
3315 return false;
3317 else if (currval != VN_TOP
3318 && ! is_gimple_min_invariant (currval)
3319 && is_gimple_min_invariant (to))
3321 if (dump_file && (dump_flags & TDF_DETAILS))
3323 fprintf (dump_file, "Forcing VARYING instead of changing "
3324 "value number of ");
3325 print_generic_expr (dump_file, from);
3326 fprintf (dump_file, " from ");
3327 print_generic_expr (dump_file, currval);
3328 fprintf (dump_file, " (non-constant) to ");
3329 print_generic_expr (dump_file, to);
3330 fprintf (dump_file, " (constant)\n");
3332 to = from;
3334 else if (TREE_CODE (to) == SSA_NAME
3335 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3336 to = from;
3339 if (dump_file && (dump_flags & TDF_DETAILS))
3341 fprintf (dump_file, "Setting value number of ");
3342 print_generic_expr (dump_file, from);
3343 fprintf (dump_file, " to ");
3344 print_generic_expr (dump_file, to);
3347 if (currval != to
3348 && !operand_equal_p (currval, to, 0)
3349 /* ??? For addresses involving volatile objects or types operand_equal_p
3350 does not reliably detect ADDR_EXPRs as equal. We know we are only
3351 getting invariant gimple addresses here, so we can use
3352 get_addr_base_and_unit_offset to do this comparison. */
3353 && !(TREE_CODE (currval) == ADDR_EXPR
3354 && TREE_CODE (to) == ADDR_EXPR
3355 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3356 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3357 && coff == toff))
3359 if (dump_file && (dump_flags & TDF_DETAILS))
3360 fprintf (dump_file, " (changed)\n");
3362 /* If we equate two SSA names we have to make the side-band info
3363 of the leader conservative (and remember whatever original value
3364 was present). */
3365 if (TREE_CODE (to) == SSA_NAME)
3367 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3368 && SSA_NAME_RANGE_INFO (to))
3370 if (SSA_NAME_IS_DEFAULT_DEF (to)
3371 || dominated_by_p_w_unex
3372 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3373 gimple_bb (SSA_NAME_DEF_STMT (to))))
3374 /* Keep the info from the dominator. */
3376 else
3378 /* Save old info. */
3379 if (! VN_INFO (to)->info.range_info)
3381 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3382 VN_INFO (to)->range_info_anti_range_p
3383 = SSA_NAME_ANTI_RANGE_P (to);
3385 /* Rather than allocating memory and unioning the info
3386 just clear it. */
3387 if (dump_file && (dump_flags & TDF_DETAILS))
3389 fprintf (dump_file, "clearing range info of ");
3390 print_generic_expr (dump_file, to);
3391 fprintf (dump_file, "\n");
3393 SSA_NAME_RANGE_INFO (to) = NULL;
3396 else if (POINTER_TYPE_P (TREE_TYPE (to))
3397 && SSA_NAME_PTR_INFO (to))
3399 if (SSA_NAME_IS_DEFAULT_DEF (to)
3400 || dominated_by_p_w_unex
3401 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3402 gimple_bb (SSA_NAME_DEF_STMT (to))))
3403 /* Keep the info from the dominator. */
3405 else if (! SSA_NAME_PTR_INFO (from)
3406 /* Handle the case of trivially equivalent info. */
3407 || memcmp (SSA_NAME_PTR_INFO (to),
3408 SSA_NAME_PTR_INFO (from),
3409 sizeof (ptr_info_def)) != 0)
3411 /* Save old info. */
3412 if (! VN_INFO (to)->info.ptr_info)
3413 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3414 /* Rather than allocating memory and unioning the info
3415 just clear it. */
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3418 fprintf (dump_file, "clearing points-to info of ");
3419 print_generic_expr (dump_file, to);
3420 fprintf (dump_file, "\n");
3422 SSA_NAME_PTR_INFO (to) = NULL;
3427 VN_INFO (from)->valnum = to;
3428 return true;
3430 if (dump_file && (dump_flags & TDF_DETAILS))
3431 fprintf (dump_file, "\n");
3432 return false;
3435 /* Mark as processed all the definitions in the defining stmt of USE, or
3436 the USE itself. */
3438 static void
3439 mark_use_processed (tree use)
3441 ssa_op_iter iter;
3442 def_operand_p defp;
3443 gimple *stmt = SSA_NAME_DEF_STMT (use);
3445 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3447 VN_INFO (use)->use_processed = true;
3448 return;
3451 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3453 tree def = DEF_FROM_PTR (defp);
3455 VN_INFO (def)->use_processed = true;
3459 /* Value number all definitions in STMT to themselves.
3460 Return true if a value number changed. */
3462 static bool
3463 defs_to_varying (gimple *stmt)
3465 bool changed = false;
3466 ssa_op_iter iter;
3467 def_operand_p defp;
3469 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3471 tree def = DEF_FROM_PTR (defp);
3472 changed |= set_ssa_val_to (def, def);
3474 return changed;
3477 /* Visit a copy between LHS and RHS, return true if the value number
3478 changed. */
3480 static bool
3481 visit_copy (tree lhs, tree rhs)
3483 /* Valueize. */
3484 rhs = SSA_VAL (rhs);
3486 return set_ssa_val_to (lhs, rhs);
3489 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3490 is the same. */
3492 static tree
3493 valueized_wider_op (tree wide_type, tree op)
3495 if (TREE_CODE (op) == SSA_NAME)
3496 op = SSA_VAL (op);
3498 /* Either we have the op widened available. */
3499 tree ops[3] = {};
3500 ops[0] = op;
3501 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3502 wide_type, ops, NULL);
3503 if (tem)
3504 return tem;
3506 /* Or the op is truncated from some existing value. */
3507 if (TREE_CODE (op) == SSA_NAME)
3509 gimple *def = SSA_NAME_DEF_STMT (op);
3510 if (is_gimple_assign (def)
3511 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3513 tem = gimple_assign_rhs1 (def);
3514 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3516 if (TREE_CODE (tem) == SSA_NAME)
3517 tem = SSA_VAL (tem);
3518 return tem;
3523 /* For constants simply extend it. */
3524 if (TREE_CODE (op) == INTEGER_CST)
3525 return wide_int_to_tree (wide_type, op);
3527 return NULL_TREE;
3530 /* Visit a nary operator RHS, value number it, and return true if the
3531 value number of LHS has changed as a result. */
3533 static bool
3534 visit_nary_op (tree lhs, gassign *stmt)
3536 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3537 if (result)
3538 return set_ssa_val_to (lhs, result);
3540 /* Do some special pattern matching for redundancies of operations
3541 in different types. */
3542 enum tree_code code = gimple_assign_rhs_code (stmt);
3543 tree type = TREE_TYPE (lhs);
3544 tree rhs1 = gimple_assign_rhs1 (stmt);
3545 switch (code)
3547 CASE_CONVERT:
3548 /* Match arithmetic done in a different type where we can easily
3549 substitute the result from some earlier sign-changed or widened
3550 operation. */
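/* An illustrative sketch (hypothetical GIMPLE, not from the original
   sources): for
     x_1 = a_2 + b_3;                (unsigned int)
     y_4 = (unsigned long) x_1;
   if the same addition done in unsigned long on widened copies of a_2
   and b_3 is already available, say tem_5, then y_4 can be valued as
   tem_5 & 0xffffffff.  */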
3551 if (INTEGRAL_TYPE_P (type)
3552 && TREE_CODE (rhs1) == SSA_NAME
3553 /* We only handle sign-changes or zero-extension -> & mask. */
3554 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3555 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3556 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3558 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3559 if (def
3560 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3561 || gimple_assign_rhs_code (def) == MINUS_EXPR
3562 || gimple_assign_rhs_code (def) == MULT_EXPR))
3564 tree ops[3] = {};
3565 /* Either we have the op widened available. */
3566 ops[0] = valueized_wider_op (type,
3567 gimple_assign_rhs1 (def));
3568 if (ops[0])
3569 ops[1] = valueized_wider_op (type,
3570 gimple_assign_rhs2 (def));
3571 if (ops[0] && ops[1])
3573 ops[0] = vn_nary_op_lookup_pieces
3574 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3575 /* We have the wider operation available. */
3576 if (ops[0])
3578 unsigned lhs_prec = TYPE_PRECISION (type);
3579 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3580 if (lhs_prec == rhs_prec)
3582 ops[1] = NULL_TREE;
3583 result = vn_nary_build_or_lookup (NOP_EXPR,
3584 type, ops);
3585 if (result)
3587 bool changed = set_ssa_val_to (lhs, result);
3588 vn_nary_op_insert_stmt (stmt, result);
3589 return changed;
3592 else
3594 ops[1] = wide_int_to_tree (type,
3595 wi::mask (rhs_prec, false,
3596 lhs_prec));
3597 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3598 TREE_TYPE (lhs),
3599 ops);
3600 if (result)
3602 bool changed = set_ssa_val_to (lhs, result);
3603 vn_nary_op_insert_stmt (stmt, result);
3604 return changed;
3611 default:;
3614 bool changed = set_ssa_val_to (lhs, lhs);
3615 vn_nary_op_insert_stmt (stmt, lhs);
3616 return changed;
3619 /* Visit a call STMT storing into LHS. Return true if the value number
3620 of the LHS has changed as a result. */
3622 static bool
3623 visit_reference_op_call (tree lhs, gcall *stmt)
3625 bool changed = false;
3626 struct vn_reference_s vr1;
3627 vn_reference_t vnresult = NULL;
3628 tree vdef = gimple_vdef (stmt);
3630 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3631 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3632 lhs = NULL_TREE;
3634 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3635 if (vnresult)
3637 if (vnresult->result_vdef && vdef)
3638 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3639 else if (vdef)
3640 /* If the call was discovered to be pure or const, reflect
3641 that as far as possible. */
3642 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3644 if (!vnresult->result && lhs)
3645 vnresult->result = lhs;
3647 if (vnresult->result && lhs)
3648 changed |= set_ssa_val_to (lhs, vnresult->result);
3650 else
3652 vn_reference_t vr2;
3653 vn_reference_s **slot;
3654 tree vdef_val = vdef;
3655 if (vdef)
3657 /* If we value numbered an indirect call's function to
3658 one not clobbering memory, value number its VDEF to its
3659 VUSE. */
3660 tree fn = gimple_call_fn (stmt);
3661 if (fn && TREE_CODE (fn) == SSA_NAME)
3663 fn = SSA_VAL (fn);
3664 if (TREE_CODE (fn) == ADDR_EXPR
3665 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3666 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3667 & (ECF_CONST | ECF_PURE)))
3668 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3670 changed |= set_ssa_val_to (vdef, vdef_val);
3672 if (lhs)
3673 changed |= set_ssa_val_to (lhs, lhs);
3674 vr2 = current_info->references_pool->allocate ();
3675 vr2->vuse = vr1.vuse;
3676 /* As we are not walking the virtual operand chain we know the
3677 shared_lookup_references are still original so we can re-use
3678 them here. */
3679 vr2->operands = vr1.operands.copy ();
3680 vr2->type = vr1.type;
3681 vr2->set = vr1.set;
3682 vr2->hashcode = vr1.hashcode;
3683 vr2->result = lhs;
3684 vr2->result_vdef = vdef_val;
3685 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3686 INSERT);
3687 gcc_assert (!*slot);
3688 *slot = vr2;
3691 return changed;
3694 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3695 and return true if the value number of the LHS has changed as a result. */
3697 static bool
3698 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3700 bool changed = false;
3701 tree last_vuse;
3702 tree result;
3704 last_vuse = gimple_vuse (stmt);
3705 last_vuse_ptr = &last_vuse;
3706 result = vn_reference_lookup (op, gimple_vuse (stmt),
3707 default_vn_walk_kind, NULL, true);
3708 last_vuse_ptr = NULL;
3710 /* We handle type-punning through unions by value-numbering based
3711 on offset and size of the access. Be prepared to handle a
3712 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
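/* For instance (an illustrative sketch, not from the original sources),
   a load of u.f that is satisfied by an earlier same-sized store to u.i
   of the same union is valued as VIEW_CONVERT_EXPR <float> of the
   stored value.  */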
3713 if (result
3714 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3716 /* We will be setting the value number of lhs to the value number
3717 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3718 So first simplify and lookup this expression to see if it
3719 is already available. */
3720 code_helper rcode = VIEW_CONVERT_EXPR;
3721 tree ops[3] = { result };
3722 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3725 if (result)
3726 changed = set_ssa_val_to (lhs, result);
3727 else
3729 changed = set_ssa_val_to (lhs, lhs);
3730 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3733 return changed;
3737 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3738 and return true if the value number of the LHS has changed as a result. */
3740 static bool
3741 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3743 bool changed = false;
3744 vn_reference_t vnresult = NULL;
3745 tree assign;
3746 bool resultsame = false;
3747 tree vuse = gimple_vuse (stmt);
3748 tree vdef = gimple_vdef (stmt);
3750 if (TREE_CODE (op) == SSA_NAME)
3751 op = SSA_VAL (op);
3753 /* First we want to look up using the *vuses* from the store and see
3754 whether the last store to this location with the same address
3755 had the same value.
3757 The vuses represent the memory state before the store. If the
3758 memory state, address, and value of the store are the same as those
3759 of the last store to this location, then this store will produce the
3760 same memory state as that store.
3762 In this case the vdef versions for this store are value numbered to those
3763 vuse versions, since they represent the same memory state after
3764 this store.
3766 Otherwise, the vdefs for the store are used when inserting into
3767 the table, since the store generates a new memory state. */
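/* For example (an illustrative sketch, not from the original sources):
   if *p_1 is already known to hold the value of x_2 at the incoming
   VUSE, a store *p_1 = x_2 leaves the memory state unchanged and its
   VDEF is value numbered to that VUSE.  */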
3769 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3770 if (vnresult
3771 && vnresult->result)
3773 tree result = vnresult->result;
3774 if (TREE_CODE (result) == SSA_NAME)
3775 result = SSA_VAL (result);
3776 resultsame = expressions_equal_p (result, op);
3777 if (resultsame)
3779 /* If the TBAA state isn't compatible for downstream reads
3780 we cannot value-number the VDEFs the same. */
3781 alias_set_type set = get_alias_set (lhs);
3782 if (vnresult->set != set
3783 && ! alias_set_subset_of (set, vnresult->set))
3784 resultsame = false;
3788 if (!resultsame)
3790 /* Only perform the following when being called from PRE
3791 which embeds tail merging. */
3792 if (default_vn_walk_kind == VN_WALK)
3794 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3795 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3796 if (vnresult)
3798 VN_INFO (vdef)->use_processed = true;
3799 return set_ssa_val_to (vdef, vnresult->result_vdef);
3803 if (dump_file && (dump_flags & TDF_DETAILS))
3805 fprintf (dump_file, "No store match\n");
3806 fprintf (dump_file, "Value numbering store ");
3807 print_generic_expr (dump_file, lhs);
3808 fprintf (dump_file, " to ");
3809 print_generic_expr (dump_file, op);
3810 fprintf (dump_file, "\n");
3812 /* Have to set value numbers before insert, since insert is
3813 going to valueize the references in-place. */
3814 if (vdef)
3815 changed |= set_ssa_val_to (vdef, vdef);
3817 /* Do not insert structure copies into the tables. */
3818 if (is_gimple_min_invariant (op)
3819 || is_gimple_reg (op))
3820 vn_reference_insert (lhs, op, vdef, NULL);
3822 /* Only perform the following when being called from PRE
3823 which embeds tail merging. */
3824 if (default_vn_walk_kind == VN_WALK)
3826 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3827 vn_reference_insert (assign, lhs, vuse, vdef);
3830 else
3832 /* We had a match, so value number the vdef to have the value
3833 number of the vuse it came from. */
3835 if (dump_file && (dump_flags & TDF_DETAILS))
3836 fprintf (dump_file, "Store matched earlier value, "
3837 "value numbering store vdefs to matching vuses.\n");
3839 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3842 return changed;
3845 /* Visit and value number PHI, return true if the value number
3846 changed. */
3848 static bool
3849 visit_phi (gimple *phi)
3851 bool changed = false;
3852 tree result;
3853 tree sameval = VN_TOP;
3854 bool allsame = true;
3855 unsigned n_executable = 0;
3857 /* TODO: We could check for this in init_sccvn, and replace this
3858 with a gcc_assert. */
3859 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3860 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3862 /* See if all non-TOP arguments have the same value. TOP is
3863 equivalent to everything, so we can ignore it. */
3864 edge_iterator ei;
3865 edge e;
3866 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3867 if (e->flags & EDGE_EXECUTABLE)
3869 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3871 ++n_executable;
3872 if (TREE_CODE (def) == SSA_NAME)
3873 def = SSA_VAL (def);
3874 if (def == VN_TOP)
3875 continue;
3876 if (sameval == VN_TOP)
3877 sameval = def;
3878 else if (!expressions_equal_p (def, sameval))
3880 allsame = false;
3881 break;
3885 /* If none of the edges was executable or all incoming values are
3886 undefined, keep the value-number at VN_TOP. If only a single edge
3887 is executable, use its value. */
3888 if (sameval == VN_TOP
3889 || n_executable == 1)
3890 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3892 /* First see if it is equivalent to a phi node in this block. We prefer
3893 this as it allows IV elimination - see PRs 66502 and 67167. */
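/* For illustration (hypothetical GIMPLE): with two PHIs in one block
     i_1 = PHI <0 (2), i_5 (3)>
     j_2 = PHI <0 (2), j_6 (3)>
   where i_5 and j_6 already carry the same value number, the lookup
   makes j_2 equivalent to i_1, which is what lets a redundant
   induction variable be eliminated.  */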
3894 result = vn_phi_lookup (phi);
3895 if (result)
3896 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3897 /* Otherwise, if all arguments value-numbered to the same value, the
3898 phi node has that value. */
3899 else if (allsame)
3900 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3901 else
3903 vn_phi_insert (phi, PHI_RESULT (phi));
3904 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3907 return changed;
3910 /* Try to simplify RHS using equivalences and constant folding. */
3912 static tree
3913 try_to_simplify (gassign *stmt)
3915 enum tree_code code = gimple_assign_rhs_code (stmt);
3916 tree tem;
3918 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3919 in this case; there is no point in doing extra work. */
3920 if (code == SSA_NAME)
3921 return NULL_TREE;
3923 /* First try constant folding based on our current lattice. */
3924 mprts_hook = vn_lookup_simplify_result;
3925 mprts_hook_cnt = 9;
3926 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3927 mprts_hook = NULL;
3928 if (tem
3929 && (TREE_CODE (tem) == SSA_NAME
3930 || is_gimple_min_invariant (tem)))
3931 return tem;
3933 return NULL_TREE;
3936 /* Visit and value number USE, return true if the value number
3937 changed. */
3939 static bool
3940 visit_use (tree use)
3942 bool changed = false;
3943 gimple *stmt = SSA_NAME_DEF_STMT (use);
3945 mark_use_processed (use);
3947 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3948 if (dump_file && (dump_flags & TDF_DETAILS)
3949 && !SSA_NAME_IS_DEFAULT_DEF (use))
3951 fprintf (dump_file, "Value numbering ");
3952 print_generic_expr (dump_file, use);
3953 fprintf (dump_file, " stmt = ");
3954 print_gimple_stmt (dump_file, stmt, 0);
3957 /* Handle uninitialized uses. */
3958 if (SSA_NAME_IS_DEFAULT_DEF (use))
3959 changed = set_ssa_val_to (use, use);
3960 else if (gimple_code (stmt) == GIMPLE_PHI)
3961 changed = visit_phi (stmt);
3962 else if (gimple_has_volatile_ops (stmt))
3963 changed = defs_to_varying (stmt);
3964 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3966 enum tree_code code = gimple_assign_rhs_code (ass);
3967 tree lhs = gimple_assign_lhs (ass);
3968 tree rhs1 = gimple_assign_rhs1 (ass);
3969 tree simplified;
3971 /* Shortcut for copies. Simplifying copies is pointless,
3972 since we copy the expression and value they represent. */
3973 if (code == SSA_NAME
3974 && TREE_CODE (lhs) == SSA_NAME)
3976 changed = visit_copy (lhs, rhs1);
3977 goto done;
3979 simplified = try_to_simplify (ass);
3980 if (simplified)
3982 if (dump_file && (dump_flags & TDF_DETAILS))
3984 fprintf (dump_file, "RHS ");
3985 print_gimple_expr (dump_file, ass, 0);
3986 fprintf (dump_file, " simplified to ");
3987 print_generic_expr (dump_file, simplified);
3988 fprintf (dump_file, "\n");
3991 /* Setting value numbers to constants will occasionally
3992 screw up phi congruence because constants are not
3993 uniquely associated with a single ssa name that can be
3994 looked up. */
3995 if (simplified
3996 && is_gimple_min_invariant (simplified)
3997 && TREE_CODE (lhs) == SSA_NAME)
3999 changed = set_ssa_val_to (lhs, simplified);
4000 goto done;
4002 else if (simplified
4003 && TREE_CODE (simplified) == SSA_NAME
4004 && TREE_CODE (lhs) == SSA_NAME)
4006 changed = visit_copy (lhs, simplified);
4007 goto done;
4010 if ((TREE_CODE (lhs) == SSA_NAME
4011 /* We can substitute SSA_NAMEs that are live over
4012 abnormal edges with their constant value. */
4013 && !(gimple_assign_copy_p (ass)
4014 && is_gimple_min_invariant (rhs1))
4015 && !(simplified
4016 && is_gimple_min_invariant (simplified))
4017 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4018 /* Stores or copies from SSA_NAMEs that are live over
4019 abnormal edges are a problem. */
4020 || (code == SSA_NAME
4021 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4022 changed = defs_to_varying (ass);
4023 else if (REFERENCE_CLASS_P (lhs)
4024 || DECL_P (lhs))
4025 changed = visit_reference_op_store (lhs, rhs1, ass);
4026 else if (TREE_CODE (lhs) == SSA_NAME)
4028 if ((gimple_assign_copy_p (ass)
4029 && is_gimple_min_invariant (rhs1))
4030 || (simplified
4031 && is_gimple_min_invariant (simplified)))
4033 if (simplified)
4034 changed = set_ssa_val_to (lhs, simplified);
4035 else
4036 changed = set_ssa_val_to (lhs, rhs1);
4038 else
4040 /* Visit the original statement. */
4041 switch (vn_get_stmt_kind (ass))
4043 case VN_NARY:
4044 changed = visit_nary_op (lhs, ass);
4045 break;
4046 case VN_REFERENCE:
4047 changed = visit_reference_op_load (lhs, rhs1, ass);
4048 break;
4049 default:
4050 changed = defs_to_varying (ass);
4051 break;
4055 else
4056 changed = defs_to_varying (ass);
4058 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4060 tree lhs = gimple_call_lhs (call_stmt);
4061 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4063 /* Try constant folding based on our current lattice. */
4064 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4065 vn_valueize);
4066 if (simplified)
4068 if (dump_file && (dump_flags & TDF_DETAILS))
4070 fprintf (dump_file, "call ");
4071 print_gimple_expr (dump_file, call_stmt, 0);
4072 fprintf (dump_file, " simplified to ");
4073 print_generic_expr (dump_file, simplified);
4074 fprintf (dump_file, "\n");
4077 /* Setting value numbers to constants will occasionally
4078 screw up phi congruence because constants are not
4079 uniquely associated with a single ssa name that can be
4080 looked up. */
4081 if (simplified
4082 && is_gimple_min_invariant (simplified))
4084 changed = set_ssa_val_to (lhs, simplified);
4085 if (gimple_vdef (call_stmt))
4086 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4087 SSA_VAL (gimple_vuse (call_stmt)));
4088 goto done;
4090 else if (simplified
4091 && TREE_CODE (simplified) == SSA_NAME)
4093 changed = visit_copy (lhs, simplified);
4094 if (gimple_vdef (call_stmt))
4095 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4096 SSA_VAL (gimple_vuse (call_stmt)));
4097 goto done;
4099 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4101 changed = defs_to_varying (call_stmt);
4102 goto done;
4106 /* Pick up flags from a devirtualization target. */
4107 tree fn = gimple_call_fn (stmt);
4108 int extra_fnflags = 0;
4109 if (fn && TREE_CODE (fn) == SSA_NAME)
4111 fn = SSA_VAL (fn);
4112 if (TREE_CODE (fn) == ADDR_EXPR
4113 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4114 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4116 if (!gimple_call_internal_p (call_stmt)
4117 && (/* Calls to the same function with the same vuse
4118 and the same operands do not necessarily return the same
4119 value, unless they're pure or const. */
4120 ((gimple_call_flags (call_stmt) | extra_fnflags)
4121 & (ECF_PURE | ECF_CONST))
4122 /* If calls have a vdef, subsequent calls won't have
4123 the same incoming vuse. So, if 2 calls with vdef have the
4124 same vuse, we know they're not subsequent.
4125 We can value number two calls to the same function with the
4126 same vuse and the same operands that are not subsequent as
4127 the same, because there is no code in the program that can
4128 compare the two values... */
4129 || (gimple_vdef (call_stmt)
4130 /* ... unless the call returns a pointer which does
4131 not alias with anything else, in which case the
4132 information that the values are distinct is encoded
4133 in the IL. */
4134 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4135 /* Only perform the following when being called from PRE
4136 which embeds tail merging. */
4137 && default_vn_walk_kind == VN_WALK)))
4138 changed = visit_reference_op_call (lhs, call_stmt);
4139 else
4140 changed = defs_to_varying (call_stmt);
4142 else
4143 changed = defs_to_varying (stmt);
4144 done:
4145 return changed;
4148 /* Compare two operands by reverse postorder index */
4150 static int
4151 compare_ops (const void *pa, const void *pb)
4153 const tree opa = *((const tree *)pa);
4154 const tree opb = *((const tree *)pb);
4155 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4156 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4157 basic_block bba;
4158 basic_block bbb;
4160 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4161 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4162 else if (gimple_nop_p (opstmta))
4163 return -1;
4164 else if (gimple_nop_p (opstmtb))
4165 return 1;
4167 bba = gimple_bb (opstmta);
4168 bbb = gimple_bb (opstmtb);
4170 if (!bba && !bbb)
4171 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4172 else if (!bba)
4173 return -1;
4174 else if (!bbb)
4175 return 1;
4177 if (bba == bbb)
4179 if (gimple_code (opstmta) == GIMPLE_PHI
4180 && gimple_code (opstmtb) == GIMPLE_PHI)
4181 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4182 else if (gimple_code (opstmta) == GIMPLE_PHI)
4183 return -1;
4184 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4185 return 1;
4186 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4187 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4188 else
4189 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4191 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4194 /* Sort an array containing members of a strongly connected component
4195 SCC so that the members are ordered by RPO number.
4196 This means that when the sort is complete, iterating through the
4197 array will give you the members in RPO order. */
4199 static void
4200 sort_scc (vec<tree> scc)
4202 scc.qsort (compare_ops);
4205 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
4207 static void
4208 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4210 size_t size = sizeof_vn_nary_op (onary->length);
4211 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4212 &info->nary_obstack);
4213 memcpy (nary, onary, size);
4214 vn_nary_op_insert_into (nary, info->nary, false);
4217 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
4219 static void
4220 copy_phi (vn_phi_t ophi, vn_tables_t info)
4222 vn_phi_t phi = info->phis_pool->allocate ();
4223 vn_phi_s **slot;
4224 memcpy (phi, ophi, sizeof (*phi));
4225 ophi->phiargs.create (0);
4226 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4227 gcc_assert (!*slot);
4228 *slot = phi;
4231 /* Insert the no longer used reference OREF into the hash tables of INFO. */
4233 static void
4234 copy_reference (vn_reference_t oref, vn_tables_t info)
4236 vn_reference_t ref;
4237 vn_reference_s **slot;
4238 ref = info->references_pool->allocate ();
4239 memcpy (ref, oref, sizeof (*ref));
4240 oref->operands.create (0);
4241 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4242 if (*slot)
4243 free_reference (*slot);
4244 *slot = ref;
4247 /* Process a strongly connected component in the SSA graph. */
4249 static void
4250 process_scc (vec<tree> scc)
4252 tree var;
4253 unsigned int i;
4254 unsigned int iterations = 0;
4255 bool changed = true;
4256 vn_nary_op_iterator_type hin;
4257 vn_phi_iterator_type hip;
4258 vn_reference_iterator_type hir;
4259 vn_nary_op_t nary;
4260 vn_phi_t phi;
4261 vn_reference_t ref;
4263 /* If the SCC has a single member, just visit it. */
4264 if (scc.length () == 1)
4266 tree use = scc[0];
4267 if (VN_INFO (use)->use_processed)
4268 return;
4269 /* We need to make sure it doesn't form a cycle itself, which can
4270 happen for self-referential PHI nodes. In that case we would
4271 end up inserting an expression with VN_TOP operands into the
4272 valid table which makes us derive bogus equivalences later.
4273 The cheapest way to check this is to assume it for all PHI nodes. */
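/* For illustration (hypothetical GIMPLE): a degenerate loop PHI like
     x_1 = PHI <x_1 (3), 0 (2)>
   is a single-name SCC that still references itself, so it has to go
   through the iteration below rather than a single visit.  */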
4274 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4275 /* Fallthru to iteration. */ ;
4276 else
4278 visit_use (use);
4279 return;
4283 if (dump_file && (dump_flags & TDF_DETAILS))
4284 print_scc (dump_file, scc);
4286 /* Iterate over the SCC with the optimistic table until it stops
4287 changing. */
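/* For illustration: for a cycle such as
     i_1 = PHI <0 (2), i_4 (3)>
     i_4 = i_1 + 0;
   the optimistic table lets i_4 be value-numbered to i_1 on the first
   pass; iteration stops once no value number in the SCC changes
   (hypothetical GIMPLE, shown only to motivate the loop below).  */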
4288 current_info = optimistic_info;
4289 while (changed)
4291 changed = false;
4292 iterations++;
4293 if (dump_file && (dump_flags & TDF_DETAILS))
4294 fprintf (dump_file, "Starting iteration %d\n", iterations);
4295 /* As we are value-numbering optimistically we have to
4296 clear the expression tables and the simplified expressions
4297 in each iteration until we converge. */
4298 optimistic_info->nary->empty ();
4299 optimistic_info->phis->empty ();
4300 optimistic_info->references->empty ();
4301 obstack_free (&optimistic_info->nary_obstack, NULL);
4302 gcc_obstack_init (&optimistic_info->nary_obstack);
4303 optimistic_info->phis_pool->release ();
4304 optimistic_info->references_pool->release ();
4305 FOR_EACH_VEC_ELT (scc, i, var)
4306 gcc_assert (!VN_INFO (var)->needs_insertion
4307 && VN_INFO (var)->expr == NULL);
4308 FOR_EACH_VEC_ELT (scc, i, var)
4309 changed |= visit_use (var);
4312 if (dump_file && (dump_flags & TDF_DETAILS))
4313 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4314 statistics_histogram_event (cfun, "SCC iterations", iterations);
4316 /* Finally, copy the contents of the no longer used optimistic
4317 table to the valid table. */
4318 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4319 copy_nary (nary, valid_info);
4320 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4321 copy_phi (phi, valid_info);
4322 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4323 ref, vn_reference_t, hir)
4324 copy_reference (ref, valid_info);
4326 current_info = valid_info;
4330 /* Pop the components of the found SCC for NAME off the SCC stack
4331 and process them. If the SCC exceeds the size limit, its members
4332 are dropped to varying instead of being iterated. */
4334 static void
4335 extract_and_process_scc_for_name (tree name)
4337 auto_vec<tree> scc;
4338 tree x;
4340 /* Found an SCC, pop the components off the SCC stack and
4341 process them. */
4344 x = sccstack.pop ();
4346 VN_INFO (x)->on_sccstack = false;
4347 scc.safe_push (x);
4348 } while (x != name);
4350 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4351 incredibly large.
4352 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4353 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4355 if (dump_file)
4357 print_scc (dump_file, scc);
4358 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4359 "size %u exceeding %u\n", scc.length (),
4360 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4362 tree var;
4363 unsigned i;
4364 FOR_EACH_VEC_ELT (scc, i, var)
4366 gimple *def = SSA_NAME_DEF_STMT (var);
4367 mark_use_processed (var);
4368 if (SSA_NAME_IS_DEFAULT_DEF (var)
4369 || gimple_code (def) == GIMPLE_PHI)
4370 set_ssa_val_to (var, var);
4371 else
4372 defs_to_varying (def);
4374 return;
4377 if (scc.length () > 1)
4378 sort_scc (scc);
4380 process_scc (scc);
4383 /* Depth first search on NAME to discover and process SCC's in the SSA
4384 graph.
4385 Execution of this algorithm relies on the fact that the SCC's are
4386 popped off the stack in topological order. */
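/* This is essentially an iterative version of Tarjan's SCC algorithm:
   DFSNUM and LOW play the usual discovery-number and low-link roles,
   NAMEVEC and ITERVEC act as the explicit recursion stack, and an SCC
   is complete when LOW == DFSNUM for its root name.  */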
4390 static void
4391 DFS (tree name)
4393 auto_vec<ssa_op_iter> itervec;
4394 auto_vec<tree> namevec;
4395 use_operand_p usep = NULL;
4396 gimple *defstmt;
4397 tree use;
4398 ssa_op_iter iter;
4400 start_over:
4401 /* SCC info */
4402 VN_INFO (name)->dfsnum = next_dfs_num++;
4403 VN_INFO (name)->visited = true;
4404 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4406 sccstack.safe_push (name);
4407 VN_INFO (name)->on_sccstack = true;
4408 defstmt = SSA_NAME_DEF_STMT (name);
4410 /* Recursively DFS on our operands, looking for SCC's. */
4411 if (!gimple_nop_p (defstmt))
4413 /* Push a new iterator. */
4414 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4415 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4416 else
4417 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4419 else
4420 clear_and_done_ssa_iter (&iter);
4422 while (1)
4424 /* If we are done processing uses of a name, go up the stack
4425 of iterators and process SCCs as we found them. */
4426 if (op_iter_done (&iter))
4428 /* See if we found an SCC. */
4429 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4430 extract_and_process_scc_for_name (name);
4432 /* Check if we are done. */
4433 if (namevec.is_empty ())
4434 return;
4436 /* Restore the last use walker and continue walking there. */
4437 use = name;
4438 name = namevec.pop ();
4439 memcpy (&iter, &itervec.last (),
4440 sizeof (ssa_op_iter));
4441 itervec.pop ();
4442 goto continue_walking;
4445 use = USE_FROM_PTR (usep);
4447 /* Since we handle phi nodes, we will sometimes get
4448 invariants in the use expression. */
4449 if (TREE_CODE (use) == SSA_NAME)
4451 if (! (VN_INFO (use)->visited))
4453 /* Recurse by pushing the current use walking state on
4454 the stack and starting over. */
4455 itervec.safe_push (iter);
4456 namevec.safe_push (name);
4457 name = use;
4458 goto start_over;
4460 continue_walking:
4461 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4462 VN_INFO (use)->low);
4464 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4465 && VN_INFO (use)->on_sccstack)
4467 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4468 VN_INFO (name)->low);
4472 usep = op_iter_next_use (&iter);
4476 /* Allocate a value number table. */
4478 static void
4479 allocate_vn_table (vn_tables_t table)
4481 table->phis = new vn_phi_table_type (23);
4482 table->nary = new vn_nary_op_table_type (23);
4483 table->references = new vn_reference_table_type (23);
4485 gcc_obstack_init (&table->nary_obstack);
4486 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4487 table->references_pool = new object_allocator<vn_reference_s>
4488 ("VN references");
4491 /* Free a value number table. */
4493 static void
4494 free_vn_table (vn_tables_t table)
4496 delete table->phis;
4497 table->phis = NULL;
4498 delete table->nary;
4499 table->nary = NULL;
4500 delete table->references;
4501 table->references = NULL;
4502 obstack_free (&table->nary_obstack, NULL);
4503 delete table->phis_pool;
4504 delete table->references_pool;
4507 static void
4508 init_scc_vn (void)
4510 int j;
4511 int *rpo_numbers_temp;
4513 calculate_dominance_info (CDI_DOMINATORS);
4514 mark_dfs_back_edges ();
4516 sccstack.create (0);
4517 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4519 constant_value_ids = BITMAP_ALLOC (NULL);
4521 next_dfs_num = 1;
4522 next_value_id = 1;
4524 vn_ssa_aux_table.create (num_ssa_names + 1);
4525 /* The create call doesn't actually grow the table to the right size;
4526 it just preallocates the space to do so. */
4527 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4528 gcc_obstack_init (&vn_ssa_aux_obstack);
4530 shared_lookup_phiargs.create (0);
4531 shared_lookup_references.create (0);
4532 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4533 rpo_numbers_temp =
4534 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4535 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4537 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4538 means that the i'th block in RPO order is bb. We want to map each
4539 bb to its RPO number, so we need to invert this array. */
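/* E.g. if rpo_numbers_temp is { 2, 5, 3 }, then block 2 comes first in
   RPO, so rpo_numbers[2] = 0, rpo_numbers[5] = 1 and rpo_numbers[3] = 2
   (made-up block indices, purely for illustration).  */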
4540 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4541 rpo_numbers[rpo_numbers_temp[j]] = j;
4543 XDELETE (rpo_numbers_temp);
4545 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4547 renumber_gimple_stmt_uids ();
4549 /* Create the valid and optimistic value numbering tables. */
4550 valid_info = XCNEW (struct vn_tables_s);
4551 allocate_vn_table (valid_info);
4552 optimistic_info = XCNEW (struct vn_tables_s);
4553 allocate_vn_table (optimistic_info);
4554 current_info = valid_info;
4556 /* Create the VN_INFO structures, and initialize value numbers to
4557 TOP or VARYING for parameters. */
4558 size_t i;
4559 tree name;
4561 FOR_EACH_SSA_NAME (i, name, cfun)
4563 VN_INFO_GET (name)->valnum = VN_TOP;
4564 VN_INFO (name)->needs_insertion = false;
4565 VN_INFO (name)->expr = NULL;
4566 VN_INFO (name)->value_id = 0;
4568 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4569 continue;
4571 switch (TREE_CODE (SSA_NAME_VAR (name)))
4573 case VAR_DECL:
4574 /* Undefined vars keep TOP. */
4575 break;
4577 case PARM_DECL:
4578 /* Parameters are VARYING but we can record a condition
4579 if we know it is a non-NULL pointer. */
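/* For illustration: for a parameter declared with the nonnull
   attribute, e.g.
     void f (int *p) __attribute__ ((nonnull));
   we insert the nary "p_1(D) != 0 == true" so later tests of P
   against NULL can be folded (hypothetical declaration).  */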
4580 VN_INFO (name)->visited = true;
4581 VN_INFO (name)->valnum = name;
4582 if (POINTER_TYPE_P (TREE_TYPE (name))
4583 && nonnull_arg_p (SSA_NAME_VAR (name)))
4585 tree ops[2];
4586 ops[0] = name;
4587 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4588 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4589 boolean_true_node, 0);
4590 if (dump_file && (dump_flags & TDF_DETAILS))
4592 fprintf (dump_file, "Recording ");
4593 print_generic_expr (dump_file, name, TDF_SLIM);
4594 fprintf (dump_file, " != 0\n");
4597 break;
4599 case RESULT_DECL:
4600 /* If the result is passed by invisible reference the default
4601 def is initialized, otherwise it's uninitialized. */
4602 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4604 VN_INFO (name)->visited = true;
4605 VN_INFO (name)->valnum = name;
4607 break;
4609 default:
4610 gcc_unreachable ();
4615 /* Restore SSA info that has been reset on value leaders. */
4617 void
4618 scc_vn_restore_ssa_info (void)
4620 unsigned i;
4621 tree name;
4623 FOR_EACH_SSA_NAME (i, name, cfun)
4625 if (has_VN_INFO (name))
4627 if (VN_INFO (name)->needs_insertion)
4629 else if (POINTER_TYPE_P (TREE_TYPE (name))
4630 && VN_INFO (name)->info.ptr_info)
4631 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4632 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4633 && VN_INFO (name)->info.range_info)
4635 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4636 SSA_NAME_ANTI_RANGE_P (name)
4637 = VN_INFO (name)->range_info_anti_range_p;
4643 void
4644 free_scc_vn (void)
4646 size_t i;
4647 tree name;
4649 delete constant_to_value_id;
4650 constant_to_value_id = NULL;
4651 BITMAP_FREE (constant_value_ids);
4652 shared_lookup_phiargs.release ();
4653 shared_lookup_references.release ();
4654 XDELETEVEC (rpo_numbers);
4656 FOR_EACH_SSA_NAME (i, name, cfun)
4658 if (has_VN_INFO (name)
4659 && VN_INFO (name)->needs_insertion)
4660 release_ssa_name (name);
4662 obstack_free (&vn_ssa_aux_obstack, NULL);
4663 vn_ssa_aux_table.release ();
4665 sccstack.release ();
4666 free_vn_table (valid_info);
4667 XDELETE (valid_info);
4668 free_vn_table (optimistic_info);
4669 XDELETE (optimistic_info);
4671 BITMAP_FREE (const_parms);
4674 /* Set *ID according to RESULT. */
4676 static void
4677 set_value_id_for_result (tree result, unsigned int *id)
4679 if (result && TREE_CODE (result) == SSA_NAME)
4680 *id = VN_INFO (result)->value_id;
4681 else if (result && is_gimple_min_invariant (result))
4682 *id = get_or_alloc_constant_value_id (result);
4683 else
4684 *id = get_next_value_id ();
4687 /* Set the value ids in the valid hash tables. */
4689 static void
4690 set_hashtable_value_ids (void)
4692 vn_nary_op_iterator_type hin;
4693 vn_phi_iterator_type hip;
4694 vn_reference_iterator_type hir;
4695 vn_nary_op_t vno;
4696 vn_reference_t vr;
4697 vn_phi_t vp;
4699 /* Now set the value ids of the things we had put in the hash
4700 table. */
4702 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4703 set_value_id_for_result (vno->result, &vno->value_id);
4705 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4706 set_value_id_for_result (vp->result, &vp->value_id);
4708 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4709 hir)
4710 set_value_id_for_result (vr->result, &vr->value_id);
4713 class sccvn_dom_walker : public dom_walker
4715 public:
4716 sccvn_dom_walker ()
4717 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4719 virtual edge before_dom_children (basic_block);
4720 virtual void after_dom_children (basic_block);
4722 void record_cond (basic_block,
4723 enum tree_code code, tree lhs, tree rhs, bool value);
4724 void record_conds (basic_block,
4725 enum tree_code code, tree lhs, tree rhs, bool value);
4727 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4728 cond_stack;
4731 /* Record a temporary condition for the BB and its dominated blocks. */
4733 void
4734 sccvn_dom_walker::record_cond (basic_block bb,
4735 enum tree_code code, tree lhs, tree rhs,
4736 bool value)
4738 tree ops[2] = { lhs, rhs };
4739 vn_nary_op_t old = NULL;
4740 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4741 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4742 vn_nary_op_t cond
4743 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4744 value
4745 ? boolean_true_node
4746 : boolean_false_node, 0);
4747 if (dump_file && (dump_flags & TDF_DETAILS))
4749 fprintf (dump_file, "Recording temporarily ");
4750 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4751 fprintf (dump_file, " %s ", get_tree_code_name (code));
4752 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4753 fprintf (dump_file, " == %s%s\n",
4754 value ? "true" : "false",
4755 old ? " (old entry saved)" : "");
4757 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4760 /* Record temporary conditions for the BB and its dominated blocks
4761 according to LHS CODE RHS == VALUE and its dominated conditions. */
4763 void
4764 sccvn_dom_walker::record_conds (basic_block bb,
4765 enum tree_code code, tree lhs, tree rhs,
4766 bool value)
4768 /* Record the original condition. */
4769 record_cond (bb, code, lhs, rhs, value);
4771 if (!value)
4772 return;
4774 /* Record dominated conditions if the condition is true. Note that
4775 the inversion is already recorded. */
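/* For example, recording "a_1 < b_2 == true" below also records
   "a_1 <= b_2 == true", "a_1 != b_2 == true" and "a_1 == b_2 == false",
   mirroring the cases handled in the switch.  */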
4776 switch (code)
4778 case LT_EXPR:
4779 case GT_EXPR:
4780 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4781 record_cond (bb, NE_EXPR, lhs, rhs, true);
4782 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4783 break;
4785 case EQ_EXPR:
4786 record_cond (bb, LE_EXPR, lhs, rhs, true);
4787 record_cond (bb, GE_EXPR, lhs, rhs, true);
4788 record_cond (bb, LT_EXPR, lhs, rhs, false);
4789 record_cond (bb, GT_EXPR, lhs, rhs, false);
4790 break;
4792 default:
4793 break;
4797 /* Restore expressions and values derived from conditionals. */
4799 void
4800 sccvn_dom_walker::after_dom_children (basic_block bb)
4802 while (!cond_stack.is_empty ()
4803 && cond_stack.last ().first == bb)
4805 vn_nary_op_t cond = cond_stack.last ().second.first;
4806 vn_nary_op_t old = cond_stack.last ().second.second;
4807 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4808 if (old)
4809 vn_nary_op_insert_into (old, current_info->nary, false);
4810 cond_stack.pop ();
4814 /* Value number all statements in BB. */
4816 edge
4817 sccvn_dom_walker::before_dom_children (basic_block bb)
4819 edge e;
4820 edge_iterator ei;
4822 if (dump_file && (dump_flags & TDF_DETAILS))
4823 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4825 /* If we have a single predecessor, record the equivalence from a
4826 possible condition on the predecessor edge. */
4827 edge pred_e = NULL;
4828 FOR_EACH_EDGE (e, ei, bb->preds)
4830 /* Ignore simple backedges into this block to allow recording
4831 conditions in loop headers. */
4832 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4833 continue;
4834 if (! pred_e)
4835 pred_e = e;
4836 else
4838 pred_e = NULL;
4839 break;
4842 if (pred_e)
4844 /* Check if there are multiple executable successor edges in
4845 the source block. Otherwise there is no additional info
4846 to be recorded. */
4847 edge e2;
4848 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4849 if (e2 != pred_e
4850 && e2->flags & EDGE_EXECUTABLE)
4851 break;
4852 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4854 gimple *stmt = last_stmt (pred_e->src);
4855 if (stmt
4856 && gimple_code (stmt) == GIMPLE_COND)
4858 enum tree_code code = gimple_cond_code (stmt);
4859 tree lhs = gimple_cond_lhs (stmt);
4860 tree rhs = gimple_cond_rhs (stmt);
4861 record_conds (bb, code, lhs, rhs,
4862 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4863 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4864 if (code != ERROR_MARK)
4865 record_conds (bb, code, lhs, rhs,
4866 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4871 /* Value-number all defs in the basic-block. */
4872 for (gphi_iterator gsi = gsi_start_phis (bb);
4873 !gsi_end_p (gsi); gsi_next (&gsi))
4875 gphi *phi = gsi.phi ();
4876 tree res = PHI_RESULT (phi);
4877 if (!VN_INFO (res)->visited)
4878 DFS (res);
4880 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4881 !gsi_end_p (gsi); gsi_next (&gsi))
4883 ssa_op_iter i;
4884 tree op;
4885 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4886 if (!VN_INFO (op)->visited)
4887 DFS (op);
4890 /* Finally look at the last stmt. */
4891 gimple *stmt = last_stmt (bb);
4892 if (!stmt)
4893 return NULL;
4895 enum gimple_code code = gimple_code (stmt);
4896 if (code != GIMPLE_COND
4897 && code != GIMPLE_SWITCH
4898 && code != GIMPLE_GOTO)
4899 return NULL;
4901 if (dump_file && (dump_flags & TDF_DETAILS))
4903 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4904 print_gimple_stmt (dump_file, stmt, 0);
4907 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4908 if value-numbering can prove they are not reachable. Handling
4909 computed gotos is also possible. */
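/* For illustration: if the GIMPLE_COND below is "if (i_1 != 0)" and i_1
   value-numbers to the constant 0, VAL becomes boolean false and only
   the false edge is returned as taken, so the dominator walk can mark
   the other outgoing edges as not executable (hypothetical condition).  */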
4910 tree val;
4911 switch (code)
4913 case GIMPLE_COND:
4915 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4916 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4917 val = gimple_simplify (gimple_cond_code (stmt),
4918 boolean_type_node, lhs, rhs,
4919 NULL, vn_valueize);
4920 /* If that didn't simplify to a constant see if we have recorded
4921 temporary expressions from taken edges. */
4922 if (!val || TREE_CODE (val) != INTEGER_CST)
4924 tree ops[2];
4925 ops[0] = lhs;
4926 ops[1] = rhs;
4927 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4928 boolean_type_node, ops, NULL);
4930 break;
4932 case GIMPLE_SWITCH:
4933 val = gimple_switch_index (as_a <gswitch *> (stmt));
4934 break;
4935 case GIMPLE_GOTO:
4936 val = gimple_goto_dest (stmt);
4937 break;
4938 default:
4939 gcc_unreachable ();
4941 if (!val)
4942 return NULL;
4944 edge taken = find_taken_edge (bb, vn_valueize (val));
4945 if (!taken)
4946 return NULL;
4948 if (dump_file && (dump_flags & TDF_DETAILS))
4949 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4950 "not executable\n", bb->index, bb->index, taken->dest->index);
4952 return taken;
4955 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4956 oracle walking during the VN process. */
4959 void
4960 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4962 size_t i;
4964 default_vn_walk_kind = default_vn_walk_kind_;
4966 init_scc_vn ();
4968 /* Collect pointers we know point to readonly memory. */
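/* The "fn spec" string is indexed by parameter position starting at 1
   (index 0 describes the function itself); a character of 'R' or 'r'
   at a parameter's position means the pointed-to memory is only read,
   so that parameter's default def is remembered in CONST_PARMS.  */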
4969 const_parms = BITMAP_ALLOC (NULL);
4970 tree fnspec = lookup_attribute ("fn spec",
4971 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4972 if (fnspec)
4974 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4975 i = 1;
4976 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4977 arg; arg = DECL_CHAIN (arg), ++i)
4979 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4980 break;
4981 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4982 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4984 tree name = ssa_default_def (cfun, arg);
4985 if (name)
4986 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4991 /* Walk all blocks in dominator order, value-numbering stmts
4992 SSA defs and decide whether outgoing edges are not executable. */
4993 sccvn_dom_walker walker;
4994 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4996 /* Initialize the value ids and prune out remaining VN_TOPs
4997 from dead code. */
4998 tree name;
5000 FOR_EACH_SSA_NAME (i, name, cfun)
5002 vn_ssa_aux_t info = VN_INFO (name);
5003 if (!info->visited)
5004 info->valnum = name;
5005 if (info->valnum == name
5006 || info->valnum == VN_TOP)
5007 info->value_id = get_next_value_id ();
5008 else if (is_gimple_min_invariant (info->valnum))
5009 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5012 /* Propagate. */
5013 FOR_EACH_SSA_NAME (i, name, cfun)
5015 vn_ssa_aux_t info = VN_INFO (name);
5016 if (TREE_CODE (info->valnum) == SSA_NAME
5017 && info->valnum != name
5018 && info->value_id != VN_INFO (info->valnum)->value_id)
5019 info->value_id = VN_INFO (info->valnum)->value_id;
5022 set_hashtable_value_ids ();
5024 if (dump_file && (dump_flags & TDF_DETAILS))
5026 fprintf (dump_file, "Value numbers:\n");
5027 FOR_EACH_SSA_NAME (i, name, cfun)
5029 if (VN_INFO (name)->visited
5030 && SSA_VAL (name) != name)
5032 print_generic_expr (dump_file, name);
5033 fprintf (dump_file, " = ");
5034 print_generic_expr (dump_file, SSA_VAL (name));
5035 fprintf (dump_file, "\n");
5041 /* Return the maximum value id we have ever seen. */
5043 unsigned int
5044 get_max_value_id (void)
5046 return next_value_id;
5049 /* Return the next unique value id. */
5051 unsigned int
5052 get_next_value_id (void)
5054 return next_value_id++;
5058 /* Compare two expressions E1 and E2 and return true if they are equal. */
5060 bool
5061 expressions_equal_p (tree e1, tree e2)
5063 /* The obvious case. */
5064 if (e1 == e2)
5065 return true;
5067 /* If either one is VN_TOP consider them equal. */
5068 if (e1 == VN_TOP || e2 == VN_TOP)
5069 return true;
5071 /* If only one of them is null, they cannot be equal. */
5072 if (!e1 || !e2)
5073 return false;
5075 /* Now perform the actual comparison. */
5076 if (TREE_CODE (e1) == TREE_CODE (e2)
5077 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5078 return true;
5080 return false;
5084 /* Return true if the nary operation NARY may trap. This is a copy
5085 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5087 bool
5088 vn_nary_may_trap (vn_nary_op_t nary)
5090 tree type;
5091 tree rhs2 = NULL_TREE;
5092 bool honor_nans = false;
5093 bool honor_snans = false;
5094 bool fp_operation = false;
5095 bool honor_trapv = false;
5096 bool handled, ret;
5097 unsigned i;
5099 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5100 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5101 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5103 type = nary->type;
5104 fp_operation = FLOAT_TYPE_P (type);
5105 if (fp_operation)
5107 honor_nans = flag_trapping_math && !flag_finite_math_only;
5108 honor_snans = flag_signaling_nans != 0;
5110 else if (INTEGRAL_TYPE_P (type)
5111 && TYPE_OVERFLOW_TRAPS (type))
5112 honor_trapv = true;
5114 if (nary->length >= 2)
5115 rhs2 = nary->op[1];
5116 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5117 honor_trapv,
5118 honor_nans, honor_snans, rhs2,
5119 &handled);
5120 if (handled
5121 && ret)
5122 return true;
5124 for (i = 0; i < nary->length; ++i)
5125 if (tree_could_trap_p (nary->op[i]))
5126 return true;
5128 return false;