contrib/gcc-8.0/gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores of the same
118 value to the same address from the same starting memory state are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCC's, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
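/* As an illustration (hypothetical GIMPLE, not from any particular
   testcase): in straight-line code such as

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   both right-hand sides hash to the same expression, so y_4 receives
   the value number of x_1 and uses of y_4 can later be replaced by
   x_1 during elimination.  */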
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
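/* Illustrative use (hypothetical): after

     unsigned id
       = get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 42));

   the bit ID is set in CONSTANT_VALUE_IDS, so value_id_constant_p (id)
   returns true.  */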
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
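/* As a sketch of the decomposition above (hypothetical access a.f, with
   'a' a VAR_DECL and 'f' a FIELD_DECL), the resulting operand vector is
   roughly

     { COMPONENT_REF, op0 = f,  off = byte offset of f where known }
     { MEM_REF,       op0 = 0,  off = 0 }
     { ADDR_EXPR,     op0 = &a, off = -1 }

   because decls are canonicalized to MEM[&decl] as noted in the
   VAR_DECL/PARM_DECL cases.  */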
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 if (gimple_call_with_bounds_p (call))
1137 temp.with_bounds = 1;
1138 result->safe_push (temp);
1140 /* Copy the call arguments. As they can be references as well,
1141 just chain them together. */
1142 for (i = 0; i < gimple_call_num_args (call); ++i)
1144 tree callarg = gimple_call_arg (call, i);
1145 copy_reference_ops_from_ref (callarg, result);
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1150 *I_P to point to the last element of the replacement. */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 unsigned int *i_p)
1155 unsigned int i = *i_p;
1156 vn_reference_op_t op = &(*ops)[i];
1157 vn_reference_op_t mem_op = &(*ops)[i - 1];
1158 tree addr_base;
1159 poly_int64 addr_offset = 0;
1161 /* The only thing we have to do is add the offset of .foo.bar within
1162 OBJ to the preceding MEM_REF offset and replace the address
1163 &OBJ.foo.bar with &OBJ. */
1164 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 &addr_offset);
1166 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167 if (addr_base != TREE_OPERAND (op->op0, 0))
1169 poly_offset_int off
1170 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 SIGNED)
1172 + addr_offset);
1173 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174 op->op0 = build_fold_addr_expr (addr_base);
1175 if (tree_fits_shwi_p (mem_op->op0))
1176 mem_op->off = tree_to_shwi (mem_op->op0);
1177 else
1178 mem_op->off = -1;
1179 return true;
1181 return false;
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1185 *I_P to point to the last element of the replacement. */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 unsigned int *i_p)
1190 unsigned int i = *i_p;
1191 vn_reference_op_t op = &(*ops)[i];
1192 vn_reference_op_t mem_op = &(*ops)[i - 1];
1193 gimple *def_stmt;
1194 enum tree_code code;
1195 poly_offset_int off;
1197 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198 if (!is_gimple_assign (def_stmt))
1199 return false;
1201 code = gimple_assign_rhs_code (def_stmt);
1202 if (code != ADDR_EXPR
1203 && code != POINTER_PLUS_EXPR)
1204 return false;
1206 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1208 /* The only thing we have to do is add the offset of .foo.bar within
1209 OBJ to the preceding MEM_REF offset and replace the address
1210 &OBJ.foo.bar with &OBJ. */
1211 if (code == ADDR_EXPR)
1213 tree addr, addr_base;
1214 poly_int64 addr_offset;
1216 addr = gimple_assign_rhs1 (def_stmt);
1217 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 &addr_offset);
1219 /* If that didn't work because the address isn't invariant, propagate
1220 the reference tree from the address operation in case the current
1221 dereference has no offset. */
1222 if (!addr_base
1223 && *i_p == ops->length () - 1
1224 && known_eq (off, 0)
1225 /* This makes us disable this transform for PRE where the
1226 reference ops might be also used for code insertion which
1227 is invalid. */
1228 && default_vn_walk_kind == VN_WALKREWRITE)
1230 auto_vec<vn_reference_op_s, 32> tem;
1231 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 /* Make sure to preserve TBAA info. The only objects not
1233 wrapped in MEM_REFs that can have their address taken are
1234 STRING_CSTs. */
1235 if (tem.length () >= 2
1236 && tem[tem.length () - 2].opcode == MEM_REF)
1238 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 new_mem_op->op0
1240 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 wi::to_poly_wide (new_mem_op->op0));
1243 else
1244 gcc_assert (tem.last ().opcode == STRING_CST);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return true;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF
1253 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1254 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1255 return false;
1257 off += addr_offset;
1258 off += mem_ref_offset (addr_base);
1259 op->op0 = TREE_OPERAND (addr_base, 0);
1261 else
1263 tree ptr, ptroff;
1264 ptr = gimple_assign_rhs1 (def_stmt);
1265 ptroff = gimple_assign_rhs2 (def_stmt);
1266 if (TREE_CODE (ptr) != SSA_NAME
1267 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1268 || TREE_CODE (ptroff) != INTEGER_CST)
1269 return false;
1271 off += wi::to_offset (ptroff);
1272 op->op0 = ptr;
1275 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1276 if (tree_fits_shwi_p (mem_op->op0))
1277 mem_op->off = tree_to_shwi (mem_op->op0);
1278 else
1279 mem_op->off = -1;
1280 if (TREE_CODE (op->op0) == SSA_NAME)
1281 op->op0 = SSA_VAL (op->op0);
1282 if (TREE_CODE (op->op0) != SSA_NAME)
1283 op->opcode = TREE_CODE (op->op0);
1285 /* And recurse. */
1286 if (TREE_CODE (op->op0) == SSA_NAME)
1287 vn_reference_maybe_forwprop_address (ops, i_p);
1288 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1289 vn_reference_fold_indirect (ops, i_p);
1290 return true;
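/* For example (hypothetical SSA names): given the definition

     p_1 = q_2 + 4;   (POINTER_PLUS_EXPR)

   a reference whose operands end in ... MEM_REF (offset 8), p_1 is
   rewritten to ... MEM_REF (offset 12), q_2, folding the pointer
   arithmetic into the constant MEM_REF offset.  */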
1293 /* Optimize the reference REF to a constant if possible or return
1294 NULL_TREE if not. */
1296 tree
1297 fully_constant_vn_reference_p (vn_reference_t ref)
1299 vec<vn_reference_op_s> operands = ref->operands;
1300 vn_reference_op_t op;
1302 /* Try to simplify the translated expression if it is
1303 a call to a builtin function with at most two arguments. */
1304 op = &operands[0];
1305 if (op->opcode == CALL_EXPR
1306 && TREE_CODE (op->op0) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1308 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1309 && operands.length () >= 2
1310 && operands.length () <= 3)
1312 vn_reference_op_t arg0, arg1 = NULL;
1313 bool anyconst = false;
1314 arg0 = &operands[1];
1315 if (operands.length () > 2)
1316 arg1 = &operands[2];
1317 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1318 || (arg0->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg0->op0)))
1320 anyconst = true;
1321 if (arg1
1322 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1323 || (arg1->opcode == ADDR_EXPR
1324 && is_gimple_min_invariant (arg1->op0))))
1325 anyconst = true;
1326 if (anyconst)
1328 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1329 arg1 ? 2 : 1,
1330 arg0->op0,
1331 arg1 ? arg1->op0 : NULL);
1332 if (folded
1333 && TREE_CODE (folded) == NOP_EXPR)
1334 folded = TREE_OPERAND (folded, 0);
1335 if (folded
1336 && is_gimple_min_invariant (folded))
1337 return folded;
1341 /* Simplify reads from constants or constant initializers. */
1342 else if (BITS_PER_UNIT == 8
1343 && is_gimple_reg_type (ref->type)
1344 && (!INTEGRAL_TYPE_P (ref->type)
1345 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1347 poly_int64 off = 0;
1348 HOST_WIDE_INT size;
1349 if (INTEGRAL_TYPE_P (ref->type))
1350 size = TYPE_PRECISION (ref->type);
1351 else
1352 size = tree_to_shwi (TYPE_SIZE (ref->type));
1353 if (size % BITS_PER_UNIT != 0
1354 || size > MAX_BITSIZE_MODE_ANY_MODE)
1355 return NULL_TREE;
1356 size /= BITS_PER_UNIT;
1357 unsigned i;
1358 for (i = 0; i < operands.length (); ++i)
1360 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1362 ++i;
1363 break;
1365 if (known_eq (operands[i].off, -1))
1366 return NULL_TREE;
1367 off += operands[i].off;
1368 if (operands[i].opcode == MEM_REF)
1370 ++i;
1371 break;
1374 vn_reference_op_t base = &operands[--i];
1375 tree ctor = error_mark_node;
1376 tree decl = NULL_TREE;
1377 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1378 ctor = base->op0;
1379 else if (base->opcode == MEM_REF
1380 && base[1].opcode == ADDR_EXPR
1381 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1382 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1383 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1385 decl = TREE_OPERAND (base[1].op0, 0);
1386 if (TREE_CODE (decl) == STRING_CST)
1387 ctor = decl;
1388 else
1389 ctor = ctor_for_folding (decl);
1391 if (ctor == NULL_TREE)
1392 return build_zero_cst (ref->type);
1393 else if (ctor != error_mark_node)
1395 HOST_WIDE_INT const_off;
1396 if (decl)
1398 tree res = fold_ctor_reference (ref->type, ctor,
1399 off * BITS_PER_UNIT,
1400 size * BITS_PER_UNIT, decl);
1401 if (res)
1403 STRIP_USELESS_TYPE_CONVERSION (res);
1404 if (is_gimple_min_invariant (res))
1405 return res;
1408 else if (off.is_constant (&const_off))
1410 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 int len = native_encode_expr (ctor, buf, size, const_off);
1412 if (len > 0)
1413 return native_interpret_expr (ref->type, buf, len);
1418 return NULL_TREE;
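/* For example (hypothetical): with

     static const int tab[4] = { 1, 2, 3, 4 };

   a lookup of tab[2] arrives here as ARRAY_REF/MEM_REF/ADDR_EXPR
   operands with a known constant offset and is folded to 3, either via
   fold_ctor_reference or via the native encode/interpret path above.  */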
1421 /* Return true if OPS contain a storage order barrier. */
1423 static bool
1424 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1426 vn_reference_op_t op;
1427 unsigned i;
1429 FOR_EACH_VEC_ELT (ops, i, op)
1430 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1431 return true;
1433 return false;
1436 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1437 structures into their value numbers. This is done in-place, and
1438 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1439 whether any operands were valueized. */
1441 static vec<vn_reference_op_s>
1442 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1444 vn_reference_op_t vro;
1445 unsigned int i;
1447 *valueized_anything = false;
1449 FOR_EACH_VEC_ELT (orig, i, vro)
1451 if (vro->opcode == SSA_NAME
1452 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1454 tree tem = SSA_VAL (vro->op0);
1455 if (tem != vro->op0)
1457 *valueized_anything = true;
1458 vro->op0 = tem;
1460 /* If it transforms from an SSA_NAME to a constant, update
1461 the opcode. */
1462 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1463 vro->opcode = TREE_CODE (vro->op0);
1465 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1467 tree tem = SSA_VAL (vro->op1);
1468 if (tem != vro->op1)
1470 *valueized_anything = true;
1471 vro->op1 = tem;
1474 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1476 tree tem = SSA_VAL (vro->op2);
1477 if (tem != vro->op2)
1479 *valueized_anything = true;
1480 vro->op2 = tem;
1483 /* If it transforms from an SSA_NAME to an address, fold with
1484 a preceding indirect reference. */
1485 if (i > 0
1486 && vro->op0
1487 && TREE_CODE (vro->op0) == ADDR_EXPR
1488 && orig[i - 1].opcode == MEM_REF)
1490 if (vn_reference_fold_indirect (&orig, &i))
1491 *valueized_anything = true;
1493 else if (i > 0
1494 && vro->opcode == SSA_NAME
1495 && orig[i - 1].opcode == MEM_REF)
1497 if (vn_reference_maybe_forwprop_address (&orig, &i))
1498 *valueized_anything = true;
1500 /* If it transforms a non-constant ARRAY_REF into a constant
1501 one, adjust the constant offset. */
1502 else if (vro->opcode == ARRAY_REF
1503 && known_eq (vro->off, -1)
1504 && poly_int_tree_p (vro->op0)
1505 && poly_int_tree_p (vro->op1)
1506 && TREE_CODE (vro->op2) == INTEGER_CST)
1508 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1509 - wi::to_poly_offset (vro->op1))
1510 * wi::to_offset (vro->op2)
1511 * vn_ref_op_align_unit (vro));
1512 off.to_shwi (&vro->off);
1516 return orig;
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1522 bool tem;
1523 return valueize_refs_1 (orig, &tem);
1526 static vec<vn_reference_op_s> shared_lookup_references;
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1530 this function. *VALUEIZED_ANYTHING will specify whether any
1531 operands were valueized. */
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1536 if (!ref)
1537 return vNULL;
1538 shared_lookup_references.truncate (0);
1539 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 valueized_anything);
1542 return shared_lookup_references;
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546 call statement. The vector is shared among all callers of
1547 this function. */
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1552 if (!call)
1553 return vNULL;
1554 shared_lookup_references.truncate (0);
1555 copy_reference_ops_from_call (call, &shared_lookup_references);
1556 shared_lookup_references = valueize_refs (shared_lookup_references);
1557 return shared_lookup_references;
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561 Returns the resulting value number if it exists in the hash table,
1562 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1563 vn_reference_t stored in the hashtable if something is found. */
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1568 vn_reference_s **slot;
1569 hashval_t hash;
1571 hash = vr->hashcode;
1572 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (!slot && current_info == optimistic_info)
1574 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (slot)
1577 if (vnresult)
1578 *vnresult = (vn_reference_t)*slot;
1579 return ((vn_reference_t)*slot)->result;
1582 return NULL_TREE;
1585 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1586 with the current VUSE and performs the expression lookup. */
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 unsigned int cnt, void *vr_)
1592 vn_reference_t vr = (vn_reference_t)vr_;
1593 vn_reference_s **slot;
1594 hashval_t hash;
1596 /* This bounds the stmt walks we perform on reference lookups
1597 to O(1) instead of O(N) where N is the number of dominating
1598 stores. */
1599 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600 return (void *)-1;
1602 if (last_vuse_ptr)
1603 *last_vuse_ptr = vuse;
1605 /* Fixup vuse and hash: the vuse's SSA_NAME_VERSION was added into the hashcode by vn_reference_compute_hash, so swap the old one out and the new one in. */
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608 vr->vuse = vuse_ssa_val (vuse);
1609 if (vr->vuse)
1610 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1612 hash = vr->hashcode;
1613 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (!slot && current_info == optimistic_info)
1615 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (slot)
1617 return *slot;
1619 return NULL;
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623 value table for the VUSE, SET, TYPE, OPERANDS reference which
1624 has the value VALUE which is either a constant or an SSA name. */
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 alias_set_type set,
1629 tree type,
1630 vec<vn_reference_op_s,
1631 va_heap> operands,
1632 tree value)
1634 vn_reference_s vr1;
1635 vn_reference_t result;
1636 unsigned value_id;
1637 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638 vr1.operands = operands;
1639 vr1.type = type;
1640 vr1.set = set;
1641 vr1.hashcode = vn_reference_compute_hash (&vr1);
1642 if (vn_reference_lookup_1 (&vr1, &result))
1643 return result;
1644 if (TREE_CODE (value) == SSA_NAME)
1645 value_id = VN_INFO (value)->value_id;
1646 else
1647 value_id = get_or_alloc_constant_value_id (value);
1648 return vn_reference_insert_pieces (vuse, set, type,
1649 operands.copy (), value, value_id);
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1654 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1656 static tree
1657 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1659 if (!rcode.is_tree_code ())
1660 return NULL_TREE;
1661 tree *ops = ops_;
1662 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1663 if (rcode == CONSTRUCTOR
1664 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1665 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1666 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1668 length = CONSTRUCTOR_NELTS (ops_[0]);
1669 ops = XALLOCAVEC (tree, length);
1670 for (unsigned i = 0; i < length; ++i)
1671 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1673 vn_nary_op_t vnresult = NULL;
1674 return vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1675 type, ops, &vnresult);
1678 /* Return a value-number for RCODE OPS... either by looking up an existing
1679 value-number for the simplified result or by inserting the operation if
1680 INSERT is true. */
1682 static tree
1683 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1684 bool insert)
1686 tree result = NULL_TREE;
1687 /* We will be creating a value number for
1688 RCODE (OPS...).
1689 So first simplify and look up this expression to see if it
1690 is already available. */
1691 mprts_hook = vn_lookup_simplify_result;
1692 bool res = false;
1693 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1695 case 1:
1696 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1697 break;
1698 case 2:
1699 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1700 break;
1701 case 3:
1702 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1703 break;
1705 mprts_hook = NULL;
1706 gimple *new_stmt = NULL;
1707 if (res
1708 && gimple_simplified_result_is_gimple_val (rcode, ops))
1709 /* The expression is already available. */
1710 result = ops[0];
1711 else
1713 tree val = vn_lookup_simplify_result (rcode, type, ops);
1714 if (!val && insert)
1716 gimple_seq stmts = NULL;
1717 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1718 if (result)
1720 gcc_assert (gimple_seq_singleton_p (stmts));
1721 new_stmt = gimple_seq_first_stmt (stmts);
1724 else
1725 /* The expression is already available. */
1726 result = val;
1728 if (new_stmt)
1730 /* The expression is not yet available, value-number lhs to
1731 the new SSA_NAME we created. */
1732 /* Initialize value-number information properly. */
1733 VN_INFO_GET (result)->valnum = result;
1734 VN_INFO (result)->value_id = get_next_value_id ();
1735 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1736 new_stmt);
1737 VN_INFO (result)->needs_insertion = true;
1738 /* ??? PRE phi-translation inserts NARYs without corresponding
1739 SSA name result. Re-use those but set their result according
1740 to the stmt we just built. */
1741 vn_nary_op_t nary = NULL;
1742 vn_nary_op_lookup_stmt (new_stmt, &nary);
1743 if (nary)
1745 gcc_assert (nary->result == NULL_TREE);
1746 nary->result = gimple_assign_lhs (new_stmt);
1748 /* As all "inserted" statements are singleton SCCs, insert
1749 to the valid table. This is strictly needed to
1750 avoid re-generating new value SSA_NAMEs for the same
1751 expression during SCC iteration over and over (the
1752 optimistic table gets cleared after each iteration).
1753 We do not need to insert into the optimistic table, as
1754 lookups there will fall back to the valid table. */
1755 else if (current_info == optimistic_info)
1757 current_info = valid_info;
1758 vn_nary_op_insert_stmt (new_stmt, result);
1759 current_info = optimistic_info;
1761 else
1762 vn_nary_op_insert_stmt (new_stmt, result);
1763 if (dump_file && (dump_flags & TDF_DETAILS))
1765 fprintf (dump_file, "Inserting name ");
1766 print_generic_expr (dump_file, result);
1767 fprintf (dump_file, " for expression ");
1768 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1769 fprintf (dump_file, "\n");
1772 return result;
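/* For example (hypothetical): simplification may yield rcode = PLUS_EXPR,
   type = int, ops = { x_1, 4 }.  If no value number for x_1 + 4 exists yet
   and INSERT is true, a fresh SSA name is created for it and recorded in
   the valid table so the same expression is not re-created on later SCC
   iterations.  */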
1775 /* Return a value-number for RCODE OPS... either by looking up an existing
1776 value-number for the simplified result or by inserting the operation. */
1778 static tree
1779 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1781 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1784 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1785 its value if present. */
1787 tree
1788 vn_nary_simplify (vn_nary_op_t nary)
1790 if (nary->length > 3)
1791 return NULL_TREE;
1792 tree ops[3];
1793 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1794 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1798 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1799 from the statement defining VUSE and if not successful tries to
1800 translate *REFP and VR_ through an aggregate copy at the definition
1801 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1802 of *REF and *VR. If only disambiguation was performed then
1803 *DISAMBIGUATE_ONLY is set to true. */
1805 static void *
1806 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1807 bool *disambiguate_only)
1809 vn_reference_t vr = (vn_reference_t)vr_;
1810 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1811 tree base = ao_ref_base (ref);
1812 HOST_WIDE_INT offseti, maxsizei;
1813 static vec<vn_reference_op_s> lhs_ops;
1814 ao_ref lhs_ref;
1815 bool lhs_ref_ok = false;
1816 poly_int64 copy_size;
1818 /* If the reference is based on a parameter that was determined as
1819 pointing to readonly memory it doesn't change. */
1820 if (TREE_CODE (base) == MEM_REF
1821 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1822 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1823 && bitmap_bit_p (const_parms,
1824 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1826 *disambiguate_only = true;
1827 return NULL;
1830 /* First try to disambiguate after value-replacing in the definitions LHS. */
1831 if (is_gimple_assign (def_stmt))
1833 tree lhs = gimple_assign_lhs (def_stmt);
1834 bool valueized_anything = false;
1835 /* Avoid re-allocation overhead. */
1836 lhs_ops.truncate (0);
1837 copy_reference_ops_from_ref (lhs, &lhs_ops);
1838 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1839 if (valueized_anything)
1841 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1842 get_alias_set (lhs),
1843 TREE_TYPE (lhs), lhs_ops);
1844 if (lhs_ref_ok
1845 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1847 *disambiguate_only = true;
1848 return NULL;
1851 else
1853 ao_ref_init (&lhs_ref, lhs);
1854 lhs_ref_ok = true;
1857 /* If we reach a clobbering statement try to skip it and see if
1858 we find a VN result with exactly the same value as the
1859 possible clobber. In this case we can ignore the clobber
1860 and return the found value.
1861 Note that we don't need to worry about partial overlapping
1862 accesses as we then can use TBAA to disambiguate against the
1863 clobbering statement when looking up a load (thus the
1864 VN_WALKREWRITE guard). */
1865 if (vn_walk_kind == VN_WALKREWRITE
1866 && is_gimple_reg_type (TREE_TYPE (lhs))
1867 && types_compatible_p (TREE_TYPE (lhs), vr->type)
1868 /* The overlap restriction breaks down when either access
1869 alias-set is zero. Still for accesses of the size of
1870 an addressable unit there can be no overlaps. Overlaps
1871 between different union members are not an issue since
1872 activation of a union member via a store makes the
1873 values of untouched bytes unspecified. */
1874 && (known_eq (ref->size, BITS_PER_UNIT)
1875 || (get_alias_set (lhs) != 0
1876 && ao_ref_alias_set (ref) != 0)))
1878 tree *saved_last_vuse_ptr = last_vuse_ptr;
1879 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1880 last_vuse_ptr = NULL;
1881 tree saved_vuse = vr->vuse;
1882 hashval_t saved_hashcode = vr->hashcode;
1883 void *res = vn_reference_lookup_2 (ref,
1884 gimple_vuse (def_stmt), 0, vr);
1885 /* Need to restore vr->vuse and vr->hashcode. */
1886 vr->vuse = saved_vuse;
1887 vr->hashcode = saved_hashcode;
1888 last_vuse_ptr = saved_last_vuse_ptr;
1889 if (res && res != (void *)-1)
1891 vn_reference_t vnresult = (vn_reference_t) res;
1892 if (vnresult->result
1893 && operand_equal_p (vnresult->result,
1894 gimple_assign_rhs1 (def_stmt), 0))
1895 return res;
1899 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1900 && gimple_call_num_args (def_stmt) <= 4)
1902 /* For builtin calls valueize their arguments and call the
1903 alias oracle again. Valueization may improve points-to
1904 info of pointers and constify size and position arguments.
1905 Originally this was motivated by PR61034 which has
1906 conditional calls to free falsely clobbering ref because
1907 of imprecise points-to info of the argument. */
1908 tree oldargs[4];
1909 bool valueized_anything = false;
1910 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1912 oldargs[i] = gimple_call_arg (def_stmt, i);
1913 tree val = vn_valueize (oldargs[i]);
1914 if (val != oldargs[i])
1916 gimple_call_set_arg (def_stmt, i, val);
1917 valueized_anything = true;
1920 if (valueized_anything)
1922 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1923 ref);
1924 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1925 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1926 if (!res)
1928 *disambiguate_only = true;
1929 return NULL;
1934 if (*disambiguate_only)
1935 return (void *)-1;
1937 /* If we cannot constrain the size of the reference we cannot
1938 test if anything kills it. */
1939 if (!ref->max_size_known_p ())
1940 return (void *)-1;
1942 poly_int64 offset = ref->offset;
1943 poly_int64 maxsize = ref->max_size;
1945 /* We can't deduce anything useful from clobbers. */
1946 if (gimple_clobber_p (def_stmt))
1947 return (void *)-1;
1949 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1950 from that definition.
1951 1) Memset. */
1952 if (is_gimple_reg_type (vr->type)
1953 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1954 && integer_zerop (gimple_call_arg (def_stmt, 1))
1955 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1956 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1958 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1959 tree base2;
1960 poly_int64 offset2, size2, maxsize2;
1961 bool reverse;
1962 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1963 &reverse);
1964 tree len = gimple_call_arg (def_stmt, 2);
1965 if (known_size_p (maxsize2)
1966 && operand_equal_p (base, base2, 0)
1967 && known_subrange_p (offset, maxsize, offset2,
1968 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1970 tree val = build_zero_cst (vr->type);
1971 return vn_reference_lookup_or_insert_for_pieces
1972 (vuse, vr->set, vr->type, vr->operands, val);
1976 /* 2) Assignment from an empty CONSTRUCTOR. */
1977 else if (is_gimple_reg_type (vr->type)
1978 && gimple_assign_single_p (def_stmt)
1979 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1980 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1982 tree base2;
1983 poly_int64 offset2, size2, maxsize2;
1984 bool reverse;
1985 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1986 &offset2, &size2, &maxsize2, &reverse);
1987 if (known_size_p (maxsize2)
1988 && known_eq (maxsize2, size2)
1989 && operand_equal_p (base, base2, 0)
1990 && known_subrange_p (offset, maxsize, offset2, size2))
1992 tree val = build_zero_cst (vr->type);
1993 return vn_reference_lookup_or_insert_for_pieces
1994 (vuse, vr->set, vr->type, vr->operands, val);
1998 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1999 routines to extract the assigned bits. */
2000 else if (known_eq (ref->size, maxsize)
2001 && is_gimple_reg_type (vr->type)
2002 && !contains_storage_order_barrier_p (vr->operands)
2003 && gimple_assign_single_p (def_stmt)
2004 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2005 /* native_encode and native_interpret operate on arrays of bytes
2006 and so fundamentally need a compile-time size and offset. */
2007 && maxsize.is_constant (&maxsizei)
2008 && maxsizei % BITS_PER_UNIT == 0
2009 && offset.is_constant (&offseti)
2010 && offseti % BITS_PER_UNIT == 0
2011 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2012 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2013 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2015 tree base2;
2016 HOST_WIDE_INT offset2, size2;
2017 bool reverse;
2018 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2019 &offset2, &size2, &reverse);
2020 if (base2
2021 && !reverse
2022 && size2 % BITS_PER_UNIT == 0
2023 && offset2 % BITS_PER_UNIT == 0
2024 && operand_equal_p (base, base2, 0)
2025 && known_subrange_p (offseti, maxsizei, offset2, size2))
2027 /* We support up to 512-bit values (for V8DFmode). */
2028 unsigned char buffer[64];
2029 int len;
2031 tree rhs = gimple_assign_rhs1 (def_stmt);
2032 if (TREE_CODE (rhs) == SSA_NAME)
2033 rhs = SSA_VAL (rhs);
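/* Encode the bytes of the (valueized) stored value starting at the
   byte offset of the load within the store; they are re-interpreted
   in the type of the load below.  */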
2034 len = native_encode_expr (rhs,
2035 buffer, sizeof (buffer),
2036 (offseti - offset2) / BITS_PER_UNIT);
2037 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2039 tree type = vr->type;
2040 /* Make sure to interpret in a type that has a range
2041 covering the whole access size. */
2042 if (INTEGRAL_TYPE_P (vr->type)
2043 && maxsizei != TYPE_PRECISION (vr->type))
2044 type = build_nonstandard_integer_type (maxsizei,
2045 TYPE_UNSIGNED (type));
2046 tree val = native_interpret_expr (type, buffer,
2047 maxsizei / BITS_PER_UNIT);
2048 /* If we chop off bits because the type's precision doesn't
2049 match the memory access size this is ok when optimizing
2050 reads but not when called from the DSE code during
2051 elimination. */
2052 if (val
2053 && type != vr->type)
2055 if (! int_fits_type_p (val, vr->type))
2056 val = NULL_TREE;
2057 else
2058 val = fold_convert (vr->type, val);
2061 if (val)
2062 return vn_reference_lookup_or_insert_for_pieces
2063 (vuse, vr->set, vr->type, vr->operands, val);
2068 /* 4) Assignment from an SSA name which definition we may be able
2069 to access pieces from. */
2070 else if (known_eq (ref->size, maxsize)
2071 && is_gimple_reg_type (vr->type)
2072 && !contains_storage_order_barrier_p (vr->operands)
2073 && gimple_assign_single_p (def_stmt)
2074 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2076 tree base2;
2077 poly_int64 offset2, size2, maxsize2;
2078 bool reverse;
2079 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2080 &offset2, &size2, &maxsize2,
2081 &reverse);
2082 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2083 if (!reverse
2084 && known_size_p (maxsize2)
2085 && known_eq (maxsize2, size2)
2086 && operand_equal_p (base, base2, 0)
2087 && known_subrange_p (offset, maxsize, offset2, size2)
2088 /* ??? We can't handle bitfield precision extracts without
2089 either using an alternate type for the BIT_FIELD_REF and
2090 then doing a conversion or possibly adjusting the offset
2091 according to endianness. */
2092 && (! INTEGRAL_TYPE_P (vr->type)
2093 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2094 && multiple_p (ref->size, BITS_PER_UNIT)
2095 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2096 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
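/* Extract the piece read from the stored SSA name with a
   BIT_FIELD_REF <value, ref->size, offset-within-store>, re-using an
   existing expression if one is available.  */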
2098 code_helper rcode = BIT_FIELD_REF;
2099 tree ops[3];
2100 ops[0] = SSA_VAL (def_rhs);
2101 ops[1] = bitsize_int (ref->size);
2102 ops[2] = bitsize_int (offset - offset2);
2103 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2104 if (val
2105 && (TREE_CODE (val) != SSA_NAME
2106 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2108 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2109 (vuse, vr->set, vr->type, vr->operands, val);
2110 return res;
2115 /* 5) For aggregate copies translate the reference through them if
2116 the copy kills ref. */
2117 else if (vn_walk_kind == VN_WALKREWRITE
2118 && gimple_assign_single_p (def_stmt)
2119 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2120 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2121 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2123 tree base2;
2124 int i, j, k;
2125 auto_vec<vn_reference_op_s> rhs;
2126 vn_reference_op_t vro;
2127 ao_ref r;
2129 if (!lhs_ref_ok)
2130 return (void *)-1;
2132 /* See if the assignment kills REF. */
2133 base2 = ao_ref_base (&lhs_ref);
2134 if (!lhs_ref.max_size_known_p ()
2135 || (base != base2
2136 && (TREE_CODE (base) != MEM_REF
2137 || TREE_CODE (base2) != MEM_REF
2138 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2139 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2140 TREE_OPERAND (base2, 1))))
2141 || !stmt_kills_ref_p (def_stmt, ref))
2142 return (void *)-1;
2144 /* Find the common base of ref and the lhs. lhs_ops already
2145 contains valueized operands for the lhs. */
2146 i = vr->operands.length () - 1;
2147 j = lhs_ops.length () - 1;
2148 while (j >= 0 && i >= 0
2149 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2151 i--;
2152 j--;
2155 /* ??? The innermost op should always be a MEM_REF and we already
2156 checked that the assignment to the lhs kills vr. Thus for
2157 aggregate copies using char[] types the vn_reference_op_eq
2158 may fail when comparing types for compatibility. But we really
2159 don't care here - further lookups with the rewritten operands
2160 will simply fail if we messed up types too badly. */
2161 poly_int64 extra_off = 0;
2162 if (j == 0 && i >= 0
2163 && lhs_ops[0].opcode == MEM_REF
2164 && maybe_ne (lhs_ops[0].off, -1))
2166 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2167 i--, j--;
2168 else if (vr->operands[i].opcode == MEM_REF
2169 && maybe_ne (vr->operands[i].off, -1))
2171 extra_off = vr->operands[i].off - lhs_ops[0].off;
2172 i--, j--;
2176 /* i now points to the first additional op.
2177 ??? LHS may not be completely contained in VR, one or more
2178 VIEW_CONVERT_EXPRs could be in its way. We could at least
2179 try handling outermost VIEW_CONVERT_EXPRs. */
2180 if (j != -1)
2181 return (void *)-1;
2183 /* Punt if the additional ops contain a storage order barrier. */
2184 for (k = i; k >= 0; k--)
2186 vro = &vr->operands[k];
2187 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2188 return (void *)-1;
2191 /* Now re-write REF to be based on the rhs of the assignment. */
2192 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2194 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2195 if (maybe_ne (extra_off, 0))
2197 if (rhs.length () < 2
2198 || rhs[0].opcode != MEM_REF
2199 || known_eq (rhs[0].off, -1))
2200 return (void *)-1;
2201 rhs[0].off += extra_off;
2202 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2203 build_int_cst (TREE_TYPE (rhs[0].op0),
2204 extra_off));
2207 /* We need to pre-pend vr->operands[0..i] to rhs. */
2208 vec<vn_reference_op_s> old = vr->operands;
2209 if (i + 1 + rhs.length () > vr->operands.length ())
2210 vr->operands.safe_grow (i + 1 + rhs.length ());
2211 else
2212 vr->operands.truncate (i + 1 + rhs.length ());
2213 FOR_EACH_VEC_ELT (rhs, j, vro)
2214 vr->operands[i + 1 + j] = *vro;
2215 vr->operands = valueize_refs (vr->operands);
2216 if (old == shared_lookup_references)
2217 shared_lookup_references = vr->operands;
2218 vr->hashcode = vn_reference_compute_hash (vr);
2220 /* Try folding the new reference to a constant. */
2221 tree val = fully_constant_vn_reference_p (vr);
2222 if (val)
2223 return vn_reference_lookup_or_insert_for_pieces
2224 (vuse, vr->set, vr->type, vr->operands, val);
2226 /* Adjust *ref from the new operands. */
2227 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2228 return (void *)-1;
2229 /* This can happen with bitfields. */
2230 if (maybe_ne (ref->size, r.size))
2231 return (void *)-1;
2232 *ref = r;
2234 /* Do not update last seen VUSE after translating. */
2235 last_vuse_ptr = NULL;
2237 /* Keep looking for the adjusted *REF / VR pair. */
2238 return NULL;
2241 /* 6) For memcpy copies translate the reference through them if
2242 the copy kills ref. */
2243 else if (vn_walk_kind == VN_WALKREWRITE
2244 && is_gimple_reg_type (vr->type)
2245 /* ??? Handle BCOPY as well. */
2246 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2247 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2248 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2249 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2250 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2251 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2252 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2253 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2255 tree lhs, rhs;
2256 ao_ref r;
2257 poly_int64 rhs_offset, lhs_offset;
2258 vn_reference_op_s op;
2259 poly_uint64 mem_offset;
2260 poly_int64 at, byte_maxsize;
2262 /* Only handle non-variable, addressable refs. */
2263 if (maybe_ne (ref->size, maxsize)
2264 || !multiple_p (offset, BITS_PER_UNIT, &at)
2265 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2266 return (void *)-1;
2268 /* Extract a pointer base and an offset for the destination. */
2269 lhs = gimple_call_arg (def_stmt, 0);
2270 lhs_offset = 0;
2271 if (TREE_CODE (lhs) == SSA_NAME)
2273 lhs = SSA_VAL (lhs);
2274 if (TREE_CODE (lhs) == SSA_NAME)
2276 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2277 if (gimple_assign_single_p (def_stmt)
2278 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2279 lhs = gimple_assign_rhs1 (def_stmt);
2282 if (TREE_CODE (lhs) == ADDR_EXPR)
2284 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2285 &lhs_offset);
2286 if (!tem)
2287 return (void *)-1;
2288 if (TREE_CODE (tem) == MEM_REF
2289 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2291 lhs = TREE_OPERAND (tem, 0);
2292 if (TREE_CODE (lhs) == SSA_NAME)
2293 lhs = SSA_VAL (lhs);
2294 lhs_offset += mem_offset;
2296 else if (DECL_P (tem))
2297 lhs = build_fold_addr_expr (tem);
2298 else
2299 return (void *)-1;
2301 if (TREE_CODE (lhs) != SSA_NAME
2302 && TREE_CODE (lhs) != ADDR_EXPR)
2303 return (void *)-1;
2305 /* Extract a pointer base and an offset for the source. */
2306 rhs = gimple_call_arg (def_stmt, 1);
2307 rhs_offset = 0;
2308 if (TREE_CODE (rhs) == SSA_NAME)
2309 rhs = SSA_VAL (rhs);
2310 if (TREE_CODE (rhs) == ADDR_EXPR)
2312 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2313 &rhs_offset);
2314 if (!tem)
2315 return (void *)-1;
2316 if (TREE_CODE (tem) == MEM_REF
2317 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2319 rhs = TREE_OPERAND (tem, 0);
2320 rhs_offset += mem_offset;
2322 else if (DECL_P (tem)
2323 || TREE_CODE (tem) == STRING_CST)
2324 rhs = build_fold_addr_expr (tem);
2325 else
2326 return (void *)-1;
2328 if (TREE_CODE (rhs) != SSA_NAME
2329 && TREE_CODE (rhs) != ADDR_EXPR)
2330 return (void *)-1;
2332 /* The bases of the destination and the references have to agree. */
2333 if (TREE_CODE (base) == MEM_REF)
2335 if (TREE_OPERAND (base, 0) != lhs
2336 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2337 return (void *) -1;
2338 at += mem_offset;
2340 else if (!DECL_P (base)
2341 || TREE_CODE (lhs) != ADDR_EXPR
2342 || TREE_OPERAND (lhs, 0) != base)
2343 return (void *)-1;
2345 /* If the access is completely outside of the memcpy destination
2346 area there is no aliasing. */
2347 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2348 return NULL;
2349 /* And the access has to be contained within the memcpy destination. */
2350 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2351 return (void *)-1;
2353 /* Make room for 2 operands in the new reference. */
2354 if (vr->operands.length () < 2)
2356 vec<vn_reference_op_s> old = vr->operands;
2357 vr->operands.safe_grow_cleared (2);
2358 if (old == shared_lookup_references)
2359 shared_lookup_references = vr->operands;
2361 else
2362 vr->operands.truncate (2);
2364 /* The looked-through reference is a simple MEM_REF. */
2365 memset (&op, 0, sizeof (op));
2366 op.type = vr->type;
2367 op.opcode = MEM_REF;
2368 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2369 op.off = at - lhs_offset + rhs_offset;
2370 vr->operands[0] = op;
2371 op.type = TREE_TYPE (rhs);
2372 op.opcode = TREE_CODE (rhs);
2373 op.op0 = rhs;
2374 op.off = -1;
2375 vr->operands[1] = op;
2376 vr->hashcode = vn_reference_compute_hash (vr);
2378 /* Try folding the new reference to a constant. */
2379 tree val = fully_constant_vn_reference_p (vr);
2380 if (val)
2381 return vn_reference_lookup_or_insert_for_pieces
2382 (vuse, vr->set, vr->type, vr->operands, val);
2384 /* Adjust *ref from the new operands. */
2385 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2386 return (void *)-1;
2387 /* This can happen with bitfields. */
2388 if (maybe_ne (ref->size, r.size))
2389 return (void *)-1;
2390 *ref = r;
2392 /* Do not update last seen VUSE after translating. */
2393 last_vuse_ptr = NULL;
2395 /* Keep looking for the adjusted *REF / VR pair. */
2396 return NULL;
2399 /* Bail out and stop walking. */
2400 return (void *)-1;
2403 /* Return a reference op vector from OP that can be used for
2404 vn_reference_lookup_pieces. The caller is responsible for releasing
2405 the vector. */
2407 vec<vn_reference_op_s>
2408 vn_reference_operands_for_lookup (tree op)
2410 bool valueized;
2411 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2414 /* Lookup a reference operation by its parts, in the current hash table.
2415 Returns the resulting value number if it exists in the hash table,
2416 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2417 vn_reference_t stored in the hashtable if something is found. */
2419 tree
2420 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2421 vec<vn_reference_op_s> operands,
2422 vn_reference_t *vnresult, vn_lookup_kind kind)
2424 struct vn_reference_s vr1;
2425 vn_reference_t tmp;
2426 tree cst;
2428 if (!vnresult)
2429 vnresult = &tmp;
2430 *vnresult = NULL;
2432 vr1.vuse = vuse_ssa_val (vuse);
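/* Valueize a copy of OPERANDS in the shared lookup vector so the
   caller's vector is left untouched.  */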
2433 shared_lookup_references.truncate (0);
2434 shared_lookup_references.safe_grow (operands.length ());
2435 memcpy (shared_lookup_references.address (),
2436 operands.address (),
2437 sizeof (vn_reference_op_s)
2438 * operands.length ());
2439 vr1.operands = operands = shared_lookup_references
2440 = valueize_refs (shared_lookup_references);
2441 vr1.type = type;
2442 vr1.set = set;
2443 vr1.hashcode = vn_reference_compute_hash (&vr1);
2444 if ((cst = fully_constant_vn_reference_p (&vr1)))
2445 return cst;
2447 vn_reference_lookup_1 (&vr1, vnresult);
2448 if (!*vnresult
2449 && kind != VN_NOWALK
2450 && vr1.vuse)
2452 ao_ref r;
2453 vn_walk_kind = kind;
2454 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2455 *vnresult =
2456 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2457 vn_reference_lookup_2,
2458 vn_reference_lookup_3,
2459 vuse_ssa_val, &vr1);
2460 gcc_checking_assert (vr1.operands == shared_lookup_references);
2463 if (*vnresult)
2464 return (*vnresult)->result;
2466 return NULL_TREE;
2469 /* Lookup OP in the current hash table, and return the resulting value
2470 number if it exists in the hash table. Return NULL_TREE if it does
2471 not exist in the hash table or if the result field of the structure
2472 was NULL. VNRESULT will be filled in with the vn_reference_t
2473 stored in the hashtable if one exists. When TBAA_P is false assume
2474 we are looking up a store and treat it as having alias-set zero. */
2476 tree
2477 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2478 vn_reference_t *vnresult, bool tbaa_p)
2480 vec<vn_reference_op_s> operands;
2481 struct vn_reference_s vr1;
2482 tree cst;
2483 bool valuezied_anything;
2485 if (vnresult)
2486 *vnresult = NULL;
2488 vr1.vuse = vuse_ssa_val (vuse);
2489 vr1.operands = operands
2490 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2491 vr1.type = TREE_TYPE (op);
2492 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2493 vr1.hashcode = vn_reference_compute_hash (&vr1);
2494 if ((cst = fully_constant_vn_reference_p (&vr1)))
2495 return cst;
2497 if (kind != VN_NOWALK
2498 && vr1.vuse)
2500 vn_reference_t wvnresult;
2501 ao_ref r;
2502 /* Make sure to use a valueized reference if we valueized anything.
2503 Otherwise preserve the full reference for advanced TBAA. */
2504 if (!valuezied_anything
2505 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2506 vr1.operands))
2507 ao_ref_init (&r, op);
2508 if (! tbaa_p)
2509 r.ref_alias_set = r.base_alias_set = 0;
2510 vn_walk_kind = kind;
2511 wvnresult =
2512 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2513 vn_reference_lookup_2,
2514 vn_reference_lookup_3,
2515 vuse_ssa_val, &vr1);
2516 gcc_checking_assert (vr1.operands == shared_lookup_references);
2517 if (wvnresult)
2519 if (vnresult)
2520 *vnresult = wvnresult;
2521 return wvnresult->result;
2524 return NULL_TREE;
2527 return vn_reference_lookup_1 (&vr1, vnresult);
2530 /* Lookup CALL in the current hash table and return the entry in
2531 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2533 void
2534 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2535 vn_reference_t vr)
2537 if (vnresult)
2538 *vnresult = NULL;
2540 tree vuse = gimple_vuse (call);
2542 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2543 vr->operands = valueize_shared_reference_ops_from_call (call);
2544 vr->type = gimple_expr_type (call);
2545 vr->set = 0;
2546 vr->hashcode = vn_reference_compute_hash (vr);
2547 vn_reference_lookup_1 (vr, vnresult);
2550 /* Insert OP into the current hash table with a value number of
2551 RESULT, and return the resulting reference structure we created. */
2553 static vn_reference_t
2554 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2556 vn_reference_s **slot;
2557 vn_reference_t vr1;
2558 bool tem;
2560 vr1 = current_info->references_pool->allocate ();
2561 if (TREE_CODE (result) == SSA_NAME)
2562 vr1->value_id = VN_INFO (result)->value_id;
2563 else
2564 vr1->value_id = get_or_alloc_constant_value_id (result);
2565 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2566 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2567 vr1->type = TREE_TYPE (op);
2568 vr1->set = get_alias_set (op);
2569 vr1->hashcode = vn_reference_compute_hash (vr1);
2570 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2571 vr1->result_vdef = vdef;
2573 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2574 INSERT);
2576 /* Because we lookup stores using vuses, and value number failures
2577 using the vdefs (see visit_reference_op_store for how and why),
2578 it's possible that on failure we may try to insert an already
2579 inserted store. This is not wrong, there is no ssa name for a
2580 store that we could use as a differentiator anyway. Thus, unlike
2581 the other lookup functions, you cannot gcc_assert (!*slot)
2582 here. */
2584 /* But free the old slot in case of a collision. */
2585 if (*slot)
2586 free_reference (*slot);
2588 *slot = vr1;
2589 return vr1;
2592 /* Insert a reference by its pieces into the current hash table with
2593 a value number of RESULT. Return the resulting reference
2594 structure we created. */
2596 vn_reference_t
2597 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2598 vec<vn_reference_op_s> operands,
2599 tree result, unsigned int value_id)
2602 vn_reference_s **slot;
2603 vn_reference_t vr1;
2605 vr1 = current_info->references_pool->allocate ();
2606 vr1->value_id = value_id;
2607 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2608 vr1->operands = valueize_refs (operands);
2609 vr1->type = type;
2610 vr1->set = set;
2611 vr1->hashcode = vn_reference_compute_hash (vr1);
2612 if (result && TREE_CODE (result) == SSA_NAME)
2613 result = SSA_VAL (result);
2614 vr1->result = result;
2616 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2617 INSERT);
2619 /* At this point we should have all the things inserted that we have
2620 seen before, and we should never try inserting something that
2621 already exists. */
2622 gcc_assert (!*slot);
2623 if (*slot)
2624 free_reference (*slot);
2626 *slot = vr1;
2627 return vr1;
2630 /* Compute and return the hash value for nary operation VBO1. */
2632 static hashval_t
2633 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2635 inchash::hash hstate;
2636 unsigned i;
2638 for (i = 0; i < vno1->length; ++i)
2639 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2640 vno1->op[i] = SSA_VAL (vno1->op[i]);
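/* Canonicalize the operand order of commutative operations and of
   comparisons so that equivalent expressions get the same hash.  */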
2642 if (((vno1->length == 2
2643 && commutative_tree_code (vno1->opcode))
2644 || (vno1->length == 3
2645 && commutative_ternary_tree_code (vno1->opcode)))
2646 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2647 std::swap (vno1->op[0], vno1->op[1]);
2648 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2649 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2651 std::swap (vno1->op[0], vno1->op[1]);
2652 vno1->opcode = swap_tree_comparison (vno1->opcode);
2655 hstate.add_int (vno1->opcode);
2656 for (i = 0; i < vno1->length; ++i)
2657 inchash::add_expr (vno1->op[i], hstate);
2659 return hstate.end ();
2662 /* Compare nary operations VNO1 and VNO2 and return true if they are
2663 equivalent. */
2665 bool
2666 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2668 unsigned i;
2670 if (vno1->hashcode != vno2->hashcode)
2671 return false;
2673 if (vno1->length != vno2->length)
2674 return false;
2676 if (vno1->opcode != vno2->opcode
2677 || !types_compatible_p (vno1->type, vno2->type))
2678 return false;
2680 for (i = 0; i < vno1->length; ++i)
2681 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2682 return false;
2684 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2685 of op1. Need to check to make sure they are the same. */
2686 if (vno1->opcode == BIT_INSERT_EXPR
2687 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2688 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2689 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2690 return false;
2692 return true;
2695 /* Initialize VNO from the pieces provided. */
2697 static void
2698 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2699 enum tree_code code, tree type, tree *ops)
2701 vno->opcode = code;
2702 vno->length = length;
2703 vno->type = type;
2704 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2707 /* Initialize VNO from OP. */
2709 static void
2710 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2712 unsigned i;
2714 vno->opcode = TREE_CODE (op);
2715 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2716 vno->type = TREE_TYPE (op);
2717 for (i = 0; i < vno->length; ++i)
2718 vno->op[i] = TREE_OPERAND (op, i);
2721 /* Return the number of operands for a vn_nary ops structure from STMT. */
2723 static unsigned int
2724 vn_nary_length_from_stmt (gimple *stmt)
2726 switch (gimple_assign_rhs_code (stmt))
2728 case REALPART_EXPR:
2729 case IMAGPART_EXPR:
2730 case VIEW_CONVERT_EXPR:
2731 return 1;
2733 case BIT_FIELD_REF:
2734 return 3;
2736 case CONSTRUCTOR:
2737 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2739 default:
2740 return gimple_num_ops (stmt) - 1;
2744 /* Initialize VNO from STMT. */
2746 static void
2747 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2749 unsigned i;
2751 vno->opcode = gimple_assign_rhs_code (stmt);
2752 vno->type = gimple_expr_type (stmt);
2753 switch (vno->opcode)
2755 case REALPART_EXPR:
2756 case IMAGPART_EXPR:
2757 case VIEW_CONVERT_EXPR:
2758 vno->length = 1;
2759 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2760 break;
2762 case BIT_FIELD_REF:
2763 vno->length = 3;
2764 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2765 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2766 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2767 break;
2769 case CONSTRUCTOR:
2770 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2771 for (i = 0; i < vno->length; ++i)
2772 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2773 break;
2775 default:
2776 gcc_checking_assert (!gimple_assign_single_p (stmt));
2777 vno->length = gimple_num_ops (stmt) - 1;
2778 for (i = 0; i < vno->length; ++i)
2779 vno->op[i] = gimple_op (stmt, i + 1);
2783 /* Compute the hashcode for VNO and look for it in the hash table;
2784 return the resulting value number if it exists in the hash table.
2785 Return NULL_TREE if it does not exist in the hash table or if the
2786 result field of the operation is NULL. VNRESULT will contain the
2787 vn_nary_op_t from the hashtable if it exists. */
2789 static tree
2790 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2792 vn_nary_op_s **slot;
2794 if (vnresult)
2795 *vnresult = NULL;
2797 vno->hashcode = vn_nary_op_compute_hash (vno);
2798 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2799 NO_INSERT);
2800 if (!slot && current_info == optimistic_info)
2801 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2802 NO_INSERT);
2803 if (!slot)
2804 return NULL_TREE;
2805 if (vnresult)
2806 *vnresult = *slot;
2807 return (*slot)->result;
2810 /* Lookup an n-ary operation by its pieces and return the resulting value
2811 number if it exists in the hash table. Return NULL_TREE if it does
2812 not exist in the hash table or if the result field of the operation
2813 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2814 if it exists. */
2816 tree
2817 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2818 tree type, tree *ops, vn_nary_op_t *vnresult)
2820 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2821 sizeof_vn_nary_op (length));
2822 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2823 return vn_nary_op_lookup_1 (vno1, vnresult);
2826 /* Lookup OP in the current hash table, and return the resulting value
2827 number if it exists in the hash table. Return NULL_TREE if it does
2828 not exist in the hash table or if the result field of the operation
2829 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2830 if it exists. */
2832 tree
2833 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2835 vn_nary_op_t vno1
2836 = XALLOCAVAR (struct vn_nary_op_s,
2837 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2838 init_vn_nary_op_from_op (vno1, op);
2839 return vn_nary_op_lookup_1 (vno1, vnresult);
2842 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2843 value number if it exists in the hash table. Return NULL_TREE if
2844 it does not exist in the hash table. VNRESULT will contain the
2845 vn_nary_op_t from the hashtable if it exists. */
2847 tree
2848 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2850 vn_nary_op_t vno1
2851 = XALLOCAVAR (struct vn_nary_op_s,
2852 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2853 init_vn_nary_op_from_stmt (vno1, stmt);
2854 return vn_nary_op_lookup_1 (vno1, vnresult);
2857 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2859 static vn_nary_op_t
2860 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2862 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2865 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2866 obstack. */
2868 static vn_nary_op_t
2869 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2871 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2872 &current_info->nary_obstack);
2874 vno1->value_id = value_id;
2875 vno1->length = length;
2876 vno1->result = result;
2878 return vno1;
2881 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2882 VNO->HASHCODE first. */
2884 static vn_nary_op_t
2885 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2886 bool compute_hash)
2888 vn_nary_op_s **slot;
2890 if (compute_hash)
2891 vno->hashcode = vn_nary_op_compute_hash (vno);
2893 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2894 /* While we do not want to insert things twice it's awkward to
2895 avoid it in the case where visit_nary_op pattern-matches stuff
2896 and ends up simplifying the replacement to itself. We then
2897 get two inserts, one from visit_nary_op and one from
2898 vn_nary_build_or_lookup.
2899 So allow inserts with the same value number. */
2900 if (*slot && (*slot)->result == vno->result)
2901 return *slot;
2903 gcc_assert (!*slot);
2905 *slot = vno;
2906 return vno;
2909 /* Insert an n-ary operation into the current hash table using its
2910 pieces. Return the vn_nary_op_t structure we created and put in
2911 the hashtable. */
2913 vn_nary_op_t
2914 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2915 tree type, tree *ops,
2916 tree result, unsigned int value_id)
2918 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2919 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2920 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2923 /* Insert OP into the current hash table with a value number of
2924 RESULT. Return the vn_nary_op_t structure we created and put in
2925 the hashtable. */
2927 vn_nary_op_t
2928 vn_nary_op_insert (tree op, tree result)
2930 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2931 vn_nary_op_t vno1;
2933 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2934 init_vn_nary_op_from_op (vno1, op);
2935 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2938 /* Insert the rhs of STMT into the current hash table with a value number of
2939 RESULT. */
2941 static vn_nary_op_t
2942 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2944 vn_nary_op_t vno1
2945 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2946 result, VN_INFO (result)->value_id);
2947 init_vn_nary_op_from_stmt (vno1, stmt);
2948 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2951 /* Compute a hashcode for PHI operation VP1 and return it. */
2953 static inline hashval_t
2954 vn_phi_compute_hash (vn_phi_t vp1)
2956 inchash::hash hstate (vp1->phiargs.length () > 2
2957 ? vp1->block->index : vp1->phiargs.length ());
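/* PHIs with more than two arguments only compare equal within the same
   block (see vn_phi_eq), so seed their hash with the block index;
   one- and two-argument PHIs can match across blocks and are seeded
   with the argument count instead.  */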
2958 tree phi1op;
2959 tree type;
2960 edge e;
2961 edge_iterator ei;
2963 /* If all PHI arguments are constants we need to distinguish
2964 the PHI node via its type. */
2965 type = vp1->type;
2966 hstate.merge_hash (vn_hash_type (type));
2968 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2970 /* Don't hash backedge values; they need to be handled as VN_TOP
2971 for optimistic value-numbering. */
2972 if (e->flags & EDGE_DFS_BACK)
2973 continue;
2975 phi1op = vp1->phiargs[e->dest_idx];
2976 if (phi1op == VN_TOP)
2977 continue;
2978 inchash::add_expr (phi1op, hstate);
2981 return hstate.end ();
2985 /* Return true if COND1 and COND2 represent the same condition, set
2986 *INVERTED_P if one needs to be inverted to make it the same as
2987 the other. */
2989 static bool
2990 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2991 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2993 enum tree_code code1 = gimple_cond_code (cond1);
2994 enum tree_code code2 = gimple_cond_code (cond2);
2996 *inverted_p = false;
2997 if (code1 == code2)
2999 else if (code1 == swap_tree_comparison (code2))
3000 std::swap (lhs2, rhs2);
3001 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3002 *inverted_p = true;
3003 else if (code1 == invert_tree_comparison
3004 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3006 std::swap (lhs2, rhs2);
3007 *inverted_p = true;
3009 else
3010 return false;
3012 return ((expressions_equal_p (lhs1, lhs2)
3013 && expressions_equal_p (rhs1, rhs2))
3014 || (commutative_tree_code (code1)
3015 && expressions_equal_p (lhs1, rhs2)
3016 && expressions_equal_p (rhs1, lhs2)));
3019 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3021 static int
3022 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3024 if (vp1->hashcode != vp2->hashcode)
3025 return false;
3027 if (vp1->block != vp2->block)
3029 if (vp1->phiargs.length () != vp2->phiargs.length ())
3030 return false;
3032 switch (vp1->phiargs.length ())
3034 case 1:
3035 /* Single-arg PHIs are just copies. */
3036 break;
3038 case 2:
3040 /* Rule out backedges into the PHI. */
3041 if (vp1->block->loop_father->header == vp1->block
3042 || vp2->block->loop_father->header == vp2->block)
3043 return false;
3045 /* If the PHI nodes do not have compatible types
3046 they are not the same. */
3047 if (!types_compatible_p (vp1->type, vp2->type))
3048 return false;
3050 basic_block idom1
3051 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3052 basic_block idom2
3053 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3054 /* If the immediate dominators end in switch stmts, multiple
3055 values may end up in the same PHI arg via intermediate
3056 CFG merges. */
3057 if (EDGE_COUNT (idom1->succs) != 2
3058 || EDGE_COUNT (idom2->succs) != 2)
3059 return false;
3061 /* Verify the controlling stmt is the same. */
3062 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3063 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3064 if (! last1 || ! last2)
3065 return false;
3066 bool inverted_p;
3067 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3068 last2, vp2->cclhs, vp2->ccrhs,
3069 &inverted_p))
3070 return false;
3072 /* Get at true/false controlled edges into the PHI. */
3073 edge te1, te2, fe1, fe2;
3074 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3075 &te1, &fe1)
3076 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3077 &te2, &fe2))
3078 return false;
3080 /* Swap edges if the second condition is the inverted of the
3081 first. */
3082 if (inverted_p)
3083 std::swap (te2, fe2);
3085 /* ??? Handle VN_TOP specially. */
3086 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3087 vp2->phiargs[te2->dest_idx])
3088 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3089 vp2->phiargs[fe2->dest_idx]))
3090 return false;
3092 return true;
3095 default:
3096 return false;
3100 /* If the PHI nodes do not have compatible types
3101 they are not the same. */
3102 if (!types_compatible_p (vp1->type, vp2->type))
3103 return false;
3105 /* Any phi in the same block will have its arguments in the
3106 same edge order, because of how we store phi nodes. */
3107 int i;
3108 tree phi1op;
3109 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3111 tree phi2op = vp2->phiargs[i];
3112 if (phi1op == VN_TOP || phi2op == VN_TOP)
3113 continue;
3114 if (!expressions_equal_p (phi1op, phi2op))
3115 return false;
3118 return true;
3121 static vec<tree> shared_lookup_phiargs;
3123 /* Lookup PHI in the current hash table, and return the resulting
3124 value number if it exists in the hash table. Return NULL_TREE if
3125 it does not exist in the hash table. */
3127 static tree
3128 vn_phi_lookup (gimple *phi)
3130 vn_phi_s **slot;
3131 struct vn_phi_s vp1;
3132 edge e;
3133 edge_iterator ei;
3135 shared_lookup_phiargs.truncate (0);
3136 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3138 /* Canonicalize the SSA_NAME's to their value number. */
3139 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3141 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3142 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3143 shared_lookup_phiargs[e->dest_idx] = def;
3145 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3146 vp1.phiargs = shared_lookup_phiargs;
3147 vp1.block = gimple_bb (phi);
3148 /* Extract values of the controlling condition. */
3149 vp1.cclhs = NULL_TREE;
3150 vp1.ccrhs = NULL_TREE;
3151 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3152 if (EDGE_COUNT (idom1->succs) == 2)
3153 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3155 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3156 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3158 vp1.hashcode = vn_phi_compute_hash (&vp1);
3159 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3160 NO_INSERT);
3161 if (!slot && current_info == optimistic_info)
3162 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3163 NO_INSERT);
3164 if (!slot)
3165 return NULL_TREE;
3166 return (*slot)->result;
3169 /* Insert PHI into the current hash table with a value number of
3170 RESULT. */
3172 static vn_phi_t
3173 vn_phi_insert (gimple *phi, tree result)
3175 vn_phi_s **slot;
3176 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3177 vec<tree> args = vNULL;
3178 edge e;
3179 edge_iterator ei;
3181 args.safe_grow (gimple_phi_num_args (phi));
3183 /* Canonicalize the SSA_NAME's to their value number. */
3184 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3186 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3187 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3188 args[e->dest_idx] = def;
3190 vp1->value_id = VN_INFO (result)->value_id;
3191 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3192 vp1->phiargs = args;
3193 vp1->block = gimple_bb (phi);
3194 /* Extract values of the controlling condition. */
3195 vp1->cclhs = NULL_TREE;
3196 vp1->ccrhs = NULL_TREE;
3197 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3198 if (EDGE_COUNT (idom1->succs) == 2)
3199 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3201 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3202 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3204 vp1->result = result;
3205 vp1->hashcode = vn_phi_compute_hash (vp1);
3207 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3209 /* Because we iterate over phi operations more than once, it's
3210 possible the slot might already exist here, hence no assert. */
3211 *slot = vp1;
3212 return vp1;
3216 /* Print set of components in strongly connected component SCC to OUT. */
3218 static void
3219 print_scc (FILE *out, vec<tree> scc)
3221 tree var;
3222 unsigned int i;
3224 fprintf (out, "SCC consists of %u:", scc.length ());
3225 FOR_EACH_VEC_ELT (scc, i, var)
3227 fprintf (out, " ");
3228 print_generic_expr (out, var);
3230 fprintf (out, "\n");
3233 /* Return true if BB1 is dominated by BB2 taking into account edges
3234 that are not executable. */
3236 static bool
3237 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3239 edge_iterator ei;
3240 edge e;
3242 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3243 return true;
3245 /* Before iterating we'd like to know if there exists a
3246 (executable) path from bb2 to bb1 at all; if not we can
3247 directly return false. For now simply iterate once. */
3249 /* Iterate to the single executable bb1 predecessor. */
3250 if (EDGE_COUNT (bb1->preds) > 1)
3252 edge prede = NULL;
3253 FOR_EACH_EDGE (e, ei, bb1->preds)
3254 if (e->flags & EDGE_EXECUTABLE)
3256 if (prede)
3258 prede = NULL;
3259 break;
3261 prede = e;
3263 if (prede)
3265 bb1 = prede->src;
3267 /* Re-do the dominance check with changed bb1. */
3268 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3269 return true;
3273 /* Iterate to the single executable bb2 successor. */
3274 edge succe = NULL;
3275 FOR_EACH_EDGE (e, ei, bb2->succs)
3276 if (e->flags & EDGE_EXECUTABLE)
3278 if (succe)
3280 succe = NULL;
3281 break;
3283 succe = e;
3285 if (succe)
3287 /* Verify the reached block is only reached through succe.
3288 If there is only one edge we can spare us the dominator
3289 check and iterate directly. */
3290 if (EDGE_COUNT (succe->dest->preds) > 1)
3292 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3293 if (e != succe
3294 && (e->flags & EDGE_EXECUTABLE))
3296 succe = NULL;
3297 break;
3300 if (succe)
3302 bb2 = succe->dest;
3304 /* Re-do the dominance check with changed bb2. */
3305 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3306 return true;
3310 /* We could now iterate updating bb1 / bb2. */
3311 return false;
3314 /* Set the value number of FROM to TO, return true if it has changed
3315 as a result. */
3317 static inline bool
3318 set_ssa_val_to (tree from, tree to)
3320 tree currval = SSA_VAL (from);
3321 poly_int64 toff, coff;
3323 /* The only thing we allow as value numbers are ssa_names
3324 and invariants. So assert that here. We don't allow VN_TOP
3325 as visiting a stmt should produce a value-number other than
3326 that.
3327 ??? Still VN_TOP can happen for unreachable code, so force
3328 it to varying in that case. Not all code is prepared to
3329 get VN_TOP on valueization. */
3330 if (to == VN_TOP)
3332 if (dump_file && (dump_flags & TDF_DETAILS))
3333 fprintf (dump_file, "Forcing value number to varying on "
3334 "receiving VN_TOP\n");
3335 to = from;
3338 gcc_assert (to != NULL_TREE
3339 && ((TREE_CODE (to) == SSA_NAME
3340 && (to == from || SSA_VAL (to) == to))
3341 || is_gimple_min_invariant (to)));
3343 if (from != to)
3345 if (currval == from)
3347 if (dump_file && (dump_flags & TDF_DETAILS))
3349 fprintf (dump_file, "Not changing value number of ");
3350 print_generic_expr (dump_file, from);
3351 fprintf (dump_file, " from VARYING to ");
3352 print_generic_expr (dump_file, to);
3353 fprintf (dump_file, "\n");
3355 return false;
3357 else if (currval != VN_TOP
3358 && ! is_gimple_min_invariant (currval)
3359 && is_gimple_min_invariant (to))
3361 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "Forcing VARYING instead of changing "
3364 "value number of ");
3365 print_generic_expr (dump_file, from);
3366 fprintf (dump_file, " from ");
3367 print_generic_expr (dump_file, currval);
3368 fprintf (dump_file, " (non-constant) to ");
3369 print_generic_expr (dump_file, to);
3370 fprintf (dump_file, " (constant)\n");
3372 to = from;
3374 else if (TREE_CODE (to) == SSA_NAME
3375 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3376 to = from;
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3381 fprintf (dump_file, "Setting value number of ");
3382 print_generic_expr (dump_file, from);
3383 fprintf (dump_file, " to ");
3384 print_generic_expr (dump_file, to);
3387 if (currval != to
3388 && !operand_equal_p (currval, to, 0)
3389 /* Different undefined SSA names are not actually different. See
3390 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3391 && !(TREE_CODE (currval) == SSA_NAME
3392 && TREE_CODE (to) == SSA_NAME
3393 && ssa_undefined_value_p (currval, false)
3394 && ssa_undefined_value_p (to, false))
3395 /* ??? For addresses involving volatile objects or types, operand_equal_p
3396 does not reliably detect ADDR_EXPRs as equal. We know we are only
3397 getting invariant gimple addresses here, so can use
3398 get_addr_base_and_unit_offset to do this comparison. */
3399 && !(TREE_CODE (currval) == ADDR_EXPR
3400 && TREE_CODE (to) == ADDR_EXPR
3401 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3402 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3403 && known_eq (coff, toff)))
3405 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, " (changed)\n");
3408 /* If we equate two SSA names we have to make the side-band info
3409 of the leader conservative (and remember whatever original value
3410 was present). */
3411 if (TREE_CODE (to) == SSA_NAME)
3413 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3414 && SSA_NAME_RANGE_INFO (to))
3416 if (SSA_NAME_IS_DEFAULT_DEF (to)
3417 || dominated_by_p_w_unex
3418 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3419 gimple_bb (SSA_NAME_DEF_STMT (to))))
3420 /* Keep the info from the dominator. */
3422 else
3424 /* Save old info. */
3425 if (! VN_INFO (to)->info.range_info)
3427 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3428 VN_INFO (to)->range_info_anti_range_p
3429 = SSA_NAME_ANTI_RANGE_P (to);
3431 /* Rather than allocating memory and unioning the info
3432 just clear it. */
3433 if (dump_file && (dump_flags & TDF_DETAILS))
3435 fprintf (dump_file, "clearing range info of ");
3436 print_generic_expr (dump_file, to);
3437 fprintf (dump_file, "\n");
3439 SSA_NAME_RANGE_INFO (to) = NULL;
3442 else if (POINTER_TYPE_P (TREE_TYPE (to))
3443 && SSA_NAME_PTR_INFO (to))
3445 if (SSA_NAME_IS_DEFAULT_DEF (to)
3446 || dominated_by_p_w_unex
3447 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3448 gimple_bb (SSA_NAME_DEF_STMT (to))))
3449 /* Keep the info from the dominator. */
3451 else if (! SSA_NAME_PTR_INFO (from)
3452 /* Handle the case of trivially equivalent info. */
3453 || memcmp (SSA_NAME_PTR_INFO (to),
3454 SSA_NAME_PTR_INFO (from),
3455 sizeof (ptr_info_def)) != 0)
3457 /* Save old info. */
3458 if (! VN_INFO (to)->info.ptr_info)
3459 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3460 /* Rather than allocating memory and unioning the info
3461 just clear it. */
3462 if (dump_file && (dump_flags & TDF_DETAILS))
3464 fprintf (dump_file, "clearing points-to info of ");
3465 print_generic_expr (dump_file, to);
3466 fprintf (dump_file, "\n");
3468 SSA_NAME_PTR_INFO (to) = NULL;
3473 VN_INFO (from)->valnum = to;
3474 return true;
3476 if (dump_file && (dump_flags & TDF_DETAILS))
3477 fprintf (dump_file, "\n");
3478 return false;
3481 /* Mark as processed all the definitions in the defining stmt of USE, or
3482 the USE itself. */
3484 static void
3485 mark_use_processed (tree use)
3487 ssa_op_iter iter;
3488 def_operand_p defp;
3489 gimple *stmt = SSA_NAME_DEF_STMT (use);
3491 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3493 VN_INFO (use)->use_processed = true;
3494 return;
3497 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3499 tree def = DEF_FROM_PTR (defp);
3501 VN_INFO (def)->use_processed = true;
3505 /* Set all definitions in STMT to value number to themselves.
3506 Return true if a value number changed. */
3508 static bool
3509 defs_to_varying (gimple *stmt)
3511 bool changed = false;
3512 ssa_op_iter iter;
3513 def_operand_p defp;
3515 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3517 tree def = DEF_FROM_PTR (defp);
3518 changed |= set_ssa_val_to (def, def);
3520 return changed;
3523 /* Visit a copy between LHS and RHS, return true if the value number
3524 changed. */
3526 static bool
3527 visit_copy (tree lhs, tree rhs)
3529 /* Valueize. */
3530 rhs = SSA_VAL (rhs);
3532 return set_ssa_val_to (lhs, rhs);
3535 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3536 is the same. */
3538 static tree
3539 valueized_wider_op (tree wide_type, tree op)
3541 if (TREE_CODE (op) == SSA_NAME)
3542 op = SSA_VAL (op);
3544 /* Either we have the op widened available. */
3545 tree ops[3] = {};
3546 ops[0] = op;
3547 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3548 wide_type, ops, NULL);
3549 if (tem)
3550 return tem;
3552 /* Or the op is truncated from some existing value. */
3553 if (TREE_CODE (op) == SSA_NAME)
3555 gimple *def = SSA_NAME_DEF_STMT (op);
3556 if (is_gimple_assign (def)
3557 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3559 tem = gimple_assign_rhs1 (def);
3560 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3562 if (TREE_CODE (tem) == SSA_NAME)
3563 tem = SSA_VAL (tem);
3564 return tem;
3569 /* For constants simply extend it. */
3570 if (TREE_CODE (op) == INTEGER_CST)
3571 return wide_int_to_tree (wide_type, wi::to_wide (op));
3573 return NULL_TREE;
3576 /* Visit a nary operator RHS, value number it, and return true if the
3577 value number of LHS has changed as a result. */
3579 static bool
3580 visit_nary_op (tree lhs, gassign *stmt)
3582 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3583 if (result)
3584 return set_ssa_val_to (lhs, result);
3586 /* Do some special pattern matching for redundancies of operations
3587 in different types. */
3588 enum tree_code code = gimple_assign_rhs_code (stmt);
3589 tree type = TREE_TYPE (lhs);
3590 tree rhs1 = gimple_assign_rhs1 (stmt);
3591 switch (code)
3593 CASE_CONVERT:
3594 /* Match arithmetic done in a different type where we can easily
3595 substitute the result from some earlier sign-changed or widened
3596 operation. */
3597 if (INTEGRAL_TYPE_P (type)
3598 && TREE_CODE (rhs1) == SSA_NAME
3599 /* We only handle sign-changes or zero-extension -> & mask. */
3600 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3601 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3602 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3604 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3605 if (def
3606 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3607 || gimple_assign_rhs_code (def) == MINUS_EXPR
3608 || gimple_assign_rhs_code (def) == MULT_EXPR))
3610 tree ops[3] = {};
3611 /* Either we have the op widened available. */
3612 ops[0] = valueized_wider_op (type,
3613 gimple_assign_rhs1 (def));
3614 if (ops[0])
3615 ops[1] = valueized_wider_op (type,
3616 gimple_assign_rhs2 (def));
3617 if (ops[0] && ops[1])
3619 ops[0] = vn_nary_op_lookup_pieces
3620 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3621 /* We have wider operation available. */
3622 if (ops[0]
3623 /* If the leader is a wrapping operation we can
3624 insert it for code hoisting w/o introducing
3625 undefined overflow. If it is not it has to
3626 be available. See PR86554. */
3627 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3628 || TREE_CODE (ops[0]) != SSA_NAME
3629 || SSA_NAME_IS_DEFAULT_DEF (ops[0])
3630 || dominated_by_p_w_unex
3631 (gimple_bb (stmt),
3632 gimple_bb (SSA_NAME_DEF_STMT (ops[0])))))
3634 unsigned lhs_prec = TYPE_PRECISION (type);
3635 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
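/* A pure sign-change can re-use the wider result via a conversion;
   a zero-extension has to mask off the bits above the narrow
   precision instead.  */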
3636 if (lhs_prec == rhs_prec)
3638 ops[1] = NULL_TREE;
3639 result = vn_nary_build_or_lookup (NOP_EXPR,
3640 type, ops);
3641 if (result)
3643 bool changed = set_ssa_val_to (lhs, result);
3644 vn_nary_op_insert_stmt (stmt, result);
3645 return changed;
3648 else
3650 ops[1] = wide_int_to_tree (type,
3651 wi::mask (rhs_prec, false,
3652 lhs_prec));
3653 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3654 TREE_TYPE (lhs),
3655 ops);
3656 if (result)
3658 bool changed = set_ssa_val_to (lhs, result);
3659 vn_nary_op_insert_stmt (stmt, result);
3660 return changed;
3667 default:;
3670 bool changed = set_ssa_val_to (lhs, lhs);
3671 vn_nary_op_insert_stmt (stmt, lhs);
3672 return changed;
3675 /* Visit a call STMT storing into LHS. Return true if the value number
3676 of the LHS has changed as a result. */
3678 static bool
3679 visit_reference_op_call (tree lhs, gcall *stmt)
3681 bool changed = false;
3682 struct vn_reference_s vr1;
3683 vn_reference_t vnresult = NULL;
3684 tree vdef = gimple_vdef (stmt);
3686 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3687 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3688 lhs = NULL_TREE;
3690 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3691 if (vnresult)
3693 if (vnresult->result_vdef && vdef)
3694 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3695 else if (vdef)
3696 /* If the call was discovered to be pure or const reflect
3697 that as far as possible. */
3698 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3700 if (!vnresult->result && lhs)
3701 vnresult->result = lhs;
3703 if (vnresult->result && lhs)
3704 changed |= set_ssa_val_to (lhs, vnresult->result);
3706 else
3708 vn_reference_t vr2;
3709 vn_reference_s **slot;
3710 tree vdef_val = vdef;
3711 if (vdef)
3713 /* If we value numbered an indirect function's function to
3714 one not clobbering memory, value number its VDEF to its
3715 VUSE. */
3716 tree fn = gimple_call_fn (stmt);
3717 if (fn && TREE_CODE (fn) == SSA_NAME)
3719 fn = SSA_VAL (fn);
3720 if (TREE_CODE (fn) == ADDR_EXPR
3721 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3722 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3723 & (ECF_CONST | ECF_PURE)))
3724 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3726 changed |= set_ssa_val_to (vdef, vdef_val);
3728 if (lhs)
3729 changed |= set_ssa_val_to (lhs, lhs);
3730 vr2 = current_info->references_pool->allocate ();
3731 vr2->vuse = vr1.vuse;
3732 /* As we are not walking the virtual operand chain we know the
3733 shared_lookup_references are still original so we can re-use
3734 them here. */
3735 vr2->operands = vr1.operands.copy ();
3736 vr2->type = vr1.type;
3737 vr2->set = vr1.set;
3738 vr2->hashcode = vr1.hashcode;
3739 vr2->result = lhs;
3740 vr2->result_vdef = vdef_val;
3741 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3742 INSERT);
3743 gcc_assert (!*slot);
3744 *slot = vr2;
3747 return changed;
3750 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3751 and return true if the value number of the LHS has changed as a result. */
3753 static bool
3754 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3756 bool changed = false;
3757 tree last_vuse;
3758 tree result;
3760 last_vuse = gimple_vuse (stmt);
3761 last_vuse_ptr = &last_vuse;
3762 result = vn_reference_lookup (op, gimple_vuse (stmt),
3763 default_vn_walk_kind, NULL, true);
3764 last_vuse_ptr = NULL;
3766 /* We handle type-punning through unions by value-numbering based
3767 on offset and size of the access. Be prepared to handle a
3768 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3769 if (result
3770 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3772 /* We will be setting the value number of lhs to the value number
3773 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3774 So first simplify and lookup this expression to see if it
3775 is already available. */
3776 code_helper rcode = VIEW_CONVERT_EXPR;
3777 tree ops[3] = { result };
3778 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3781 if (result)
3782 changed = set_ssa_val_to (lhs, result);
3783 else
3785 changed = set_ssa_val_to (lhs, lhs);
3786 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3789 return changed;
3793 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3794 and return true if the value number of the LHS has changed as a result. */
3796 static bool
3797 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3799 bool changed = false;
3800 vn_reference_t vnresult = NULL;
3801 tree assign;
3802 bool resultsame = false;
3803 tree vuse = gimple_vuse (stmt);
3804 tree vdef = gimple_vdef (stmt);
3806 if (TREE_CODE (op) == SSA_NAME)
3807 op = SSA_VAL (op);
3809 /* First we want to lookup using the *vuses* from the store and see
3810 whether the last store to this location with the same address
3811 had the same value.
3813 The vuses represent the memory state before the store. If the
3814 memory state, address, and value of the store are the same as the
3815 last store to this location, then this store will produce the
3816 same memory state as that store.
3818 In this case the vdef versions for this store are value numbered to those
3819 vuse versions, since they represent the same memory state after
3820 this store.
3822 Otherwise, the vdefs for the store are used when inserting into
3823 the table, since the store generates a new memory state. */
3825 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3826 if (vnresult
3827 && vnresult->result)
3829 tree result = vnresult->result;
3830 if (TREE_CODE (result) == SSA_NAME)
3831 result = SSA_VAL (result);
3832 resultsame = expressions_equal_p (result, op);
3833 if (resultsame)
3835 /* If the TBAA state isn't compatible for downstream reads
3836 we cannot value-number the VDEFs the same. */
3837 alias_set_type set = get_alias_set (lhs);
3838 if (vnresult->set != set
3839 && ! alias_set_subset_of (set, vnresult->set))
3840 resultsame = false;
3844 if (!resultsame)
3846 /* Only perform the following when being called from PRE
3847 which embeds tail merging. */
3848 if (default_vn_walk_kind == VN_WALK)
3850 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3851 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3852 if (vnresult)
3854 VN_INFO (vdef)->use_processed = true;
3855 return set_ssa_val_to (vdef, vnresult->result_vdef);
3859 if (dump_file && (dump_flags & TDF_DETAILS))
3861 fprintf (dump_file, "No store match\n");
3862 fprintf (dump_file, "Value numbering store ");
3863 print_generic_expr (dump_file, lhs);
3864 fprintf (dump_file, " to ");
3865 print_generic_expr (dump_file, op);
3866 fprintf (dump_file, "\n");
3868 /* Have to set value numbers before insert, since insert is
3869 going to valueize the references in-place. */
3870 if (vdef)
3871 changed |= set_ssa_val_to (vdef, vdef);
3873 /* Do not insert structure copies into the tables. */
3874 if (is_gimple_min_invariant (op)
3875 || is_gimple_reg (op))
3876 vn_reference_insert (lhs, op, vdef, NULL);
3878 /* Only perform the following when being called from PRE
3879 which embeds tail merging. */
3880 if (default_vn_walk_kind == VN_WALK)
3882 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3883 vn_reference_insert (assign, lhs, vuse, vdef);
3886 else
3888 /* We had a match, so value number the vdef to have the value
3889 number of the vuse it came from. */
3891 if (dump_file && (dump_flags & TDF_DETAILS))
3892 fprintf (dump_file, "Store matched earlier value, "
3893 "value numbering store vdefs to matching vuses.\n");
3895 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3898 return changed;
3901 /* Visit and value number PHI, return true if the value number
3902 changed. */
3904 static bool
3905 visit_phi (gimple *phi)
3907 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3908 unsigned n_executable = 0;
3909 bool allsame = true;
3910 edge_iterator ei;
3911 edge e;
3913 /* TODO: We could check for this in init_sccvn, and replace this
3914 with a gcc_assert. */
3915 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3916 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3918 /* See if all non-TOP arguments have the same value. TOP is
3919 equivalent to everything, so we can ignore it. */
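/* E.g. (illustration only) x_4 = PHI <7(2), 7(3), x_2(4)> with the
   edge from block 4 not executable typically value numbers x_4
   to 7.  */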
3920 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3921 if (e->flags & EDGE_EXECUTABLE)
3923 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3925 ++n_executable;
3926 if (TREE_CODE (def) == SSA_NAME)
3927 def = SSA_VAL (def);
3928 if (def == VN_TOP)
3930 /* Ignore undefined defs for sameval but record one. */
3931 else if (TREE_CODE (def) == SSA_NAME
3932 && ssa_undefined_value_p (def, false))
3933 seen_undef = def;
3934 else if (sameval == VN_TOP)
3935 sameval = def;
3936 else if (!expressions_equal_p (def, sameval))
3938 allsame = false;
3939 break;
3944 /* If none of the edges was executable keep the value-number at VN_TOP;
3945 if only a single edge is executable use its value.
3946 if (n_executable <= 1)
3947 result = seen_undef ? seen_undef : sameval;
3948 /* If we saw only undefined values and VN_TOP use one of the
3949 undefined values. */
3950 else if (sameval == VN_TOP)
3951 result = seen_undef ? seen_undef : sameval;
3952 /* First see if it is equivalent to a phi node in this block. We prefer
3953 this as it allows IV elimination - see PRs 66502 and 67167. */
3954 else if ((result = vn_phi_lookup (phi)))
3956 /* If all values are the same use that, unless we've seen undefined
3957 values as well and the value isn't constant.
3958 CCP/copyprop have the same restriction to not remove uninit warnings. */
3959 else if (allsame
3960 && (! seen_undef || is_gimple_min_invariant (sameval)))
3961 result = sameval;
3962 else
3964 result = PHI_RESULT (phi);
3965 /* Only insert PHIs that are varying; for constant value numbers
3966 we would mess up equivalences otherwise, as we are only comparing
3967 the immediate controlling predicates. */
3968 vn_phi_insert (phi, result);
3971 return set_ssa_val_to (PHI_RESULT (phi), result);
3974 /* Try to simplify RHS using equivalences and constant folding. */
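/* For example (illustration only), if a_1 currently has value 3 and
   b_2 has value 4, an RHS a_1 + b_2 folds to 7 via the valueization
   hooks passed to gimple_fold_stmt_to_constant_1 below.  */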
3976 static tree
3977 try_to_simplify (gassign *stmt)
3979 enum tree_code code = gimple_assign_rhs_code (stmt);
3980 tree tem;
3982 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3983 in this case; there is no point in doing extra work. */
3984 if (code == SSA_NAME)
3985 return NULL_TREE;
3987 /* First try constant folding based on our current lattice. */
3988 mprts_hook = vn_lookup_simplify_result;
3989 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3990 mprts_hook = NULL;
3991 if (tem
3992 && (TREE_CODE (tem) == SSA_NAME
3993 || is_gimple_min_invariant (tem)))
3994 return tem;
3996 return NULL_TREE;
3999 /* Visit and value number USE, return true if the value number
4000 changed. */
4002 static bool
4003 visit_use (tree use)
4005 bool changed = false;
4006 gimple *stmt = SSA_NAME_DEF_STMT (use);
4008 mark_use_processed (use);
4010 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4011 && !SSA_NAME_IS_DEFAULT_DEF (use));
4013 if (dump_file && (dump_flags & TDF_DETAILS))
4015 fprintf (dump_file, "Value numbering ");
4016 print_generic_expr (dump_file, use);
4017 fprintf (dump_file, " stmt = ");
4018 print_gimple_stmt (dump_file, stmt, 0);
4021 if (gimple_code (stmt) == GIMPLE_PHI)
4022 changed = visit_phi (stmt);
4023 else if (gimple_has_volatile_ops (stmt))
4024 changed = defs_to_varying (stmt);
4025 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4027 enum tree_code code = gimple_assign_rhs_code (ass);
4028 tree lhs = gimple_assign_lhs (ass);
4029 tree rhs1 = gimple_assign_rhs1 (ass);
4030 tree simplified;
4032 /* Shortcut for copies. Simplifying copies is pointless,
4033 since we copy the expression and value they represent. */
4034 if (code == SSA_NAME
4035 && TREE_CODE (lhs) == SSA_NAME)
4037 changed = visit_copy (lhs, rhs1);
4038 goto done;
4040 simplified = try_to_simplify (ass);
4041 if (simplified)
4043 if (dump_file && (dump_flags & TDF_DETAILS))
4045 fprintf (dump_file, "RHS ");
4046 print_gimple_expr (dump_file, ass, 0);
4047 fprintf (dump_file, " simplified to ");
4048 print_generic_expr (dump_file, simplified);
4049 fprintf (dump_file, "\n");
4052 /* Setting value numbers to constants will occasionally
4053 screw up phi congruence because constants are not
4054 uniquely associated with a single ssa name that can be
4055 looked up. */
4056 if (simplified
4057 && is_gimple_min_invariant (simplified)
4058 && TREE_CODE (lhs) == SSA_NAME)
4060 changed = set_ssa_val_to (lhs, simplified);
4061 goto done;
4063 else if (simplified
4064 && TREE_CODE (simplified) == SSA_NAME
4065 && TREE_CODE (lhs) == SSA_NAME)
4067 changed = visit_copy (lhs, simplified);
4068 goto done;
4071 if ((TREE_CODE (lhs) == SSA_NAME
4072 /* We can substitute SSA_NAMEs that are live over
4073 abnormal edges with their constant value. */
4074 && !(gimple_assign_copy_p (ass)
4075 && is_gimple_min_invariant (rhs1))
4076 && !(simplified
4077 && is_gimple_min_invariant (simplified))
4078 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4079 /* Stores or copies from SSA_NAMEs that are live over
4080 abnormal edges are a problem. */
4081 || (code == SSA_NAME
4082 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4083 changed = defs_to_varying (ass);
4084 else if (REFERENCE_CLASS_P (lhs)
4085 || DECL_P (lhs))
4086 changed = visit_reference_op_store (lhs, rhs1, ass);
4087 else if (TREE_CODE (lhs) == SSA_NAME)
4089 if ((gimple_assign_copy_p (ass)
4090 && is_gimple_min_invariant (rhs1))
4091 || (simplified
4092 && is_gimple_min_invariant (simplified)))
4094 if (simplified)
4095 changed = set_ssa_val_to (lhs, simplified);
4096 else
4097 changed = set_ssa_val_to (lhs, rhs1);
4099 else
4101 /* Visit the original statement. */
4102 switch (vn_get_stmt_kind (ass))
4104 case VN_NARY:
4105 changed = visit_nary_op (lhs, ass);
4106 break;
4107 case VN_REFERENCE:
4108 changed = visit_reference_op_load (lhs, rhs1, ass);
4109 break;
4110 default:
4111 changed = defs_to_varying (ass);
4112 break;
4116 else
4117 changed = defs_to_varying (ass);
4119 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4121 tree lhs = gimple_call_lhs (call_stmt);
4122 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4124 /* Try constant folding based on our current lattice. */
4125 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4126 vn_valueize);
4127 if (simplified)
4129 if (dump_file && (dump_flags & TDF_DETAILS))
4131 fprintf (dump_file, "call ");
4132 print_gimple_expr (dump_file, call_stmt, 0);
4133 fprintf (dump_file, " simplified to ");
4134 print_generic_expr (dump_file, simplified);
4135 fprintf (dump_file, "\n");
4138 /* Setting value numbers to constants will occasionally
4139 screw up phi congruence because constants are not
4140 uniquely associated with a single ssa name that can be
4141 looked up. */
4142 if (simplified
4143 && is_gimple_min_invariant (simplified))
4145 changed = set_ssa_val_to (lhs, simplified);
4146 if (gimple_vdef (call_stmt))
4147 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4148 SSA_VAL (gimple_vuse (call_stmt)));
4149 goto done;
4151 else if (simplified
4152 && TREE_CODE (simplified) == SSA_NAME)
4154 changed = visit_copy (lhs, simplified);
4155 if (gimple_vdef (call_stmt))
4156 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4157 SSA_VAL (gimple_vuse (call_stmt)));
4158 goto done;
4160 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4162 changed = defs_to_varying (call_stmt);
4163 goto done;
4167 /* Pick up flags from a devirtualization target. */
4168 tree fn = gimple_call_fn (stmt);
4169 int extra_fnflags = 0;
4170 if (fn && TREE_CODE (fn) == SSA_NAME)
4172 fn = SSA_VAL (fn);
4173 if (TREE_CODE (fn) == ADDR_EXPR
4174 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4175 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
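/* Illustrative example (not from the sources): two calls
     x_1 = foo (a_2);
     x_3 = foo (a_2);
   to a const or pure function foo with the same arguments and the
   same vuse get the same value number, making the second call
   redundant; calls that clobber memory are only treated this way
   when running from PRE (VN_WALK), as the condition below shows.  */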
4177 if (!gimple_call_internal_p (call_stmt)
4178 && (/* Calls to the same function with the same vuse
4179 and the same operands do not necessarily return the same
4180 value, unless they're pure or const. */
4181 ((gimple_call_flags (call_stmt) | extra_fnflags)
4182 & (ECF_PURE | ECF_CONST))
4183 /* If calls have a vdef, subsequent calls won't have
4184 the same incoming vuse. So, if 2 calls with vdef have the
4185 same vuse, we know they're not subsequent.
4186 We can value number 2 calls to the same function with the
4187 same vuse and the same operands, which are not subsequent,
4188 the same, because there is no code in the program that can
4189 compare the 2 values... */
4190 || (gimple_vdef (call_stmt)
4191 /* ... unless the call returns a pointer which does
4192 not alias with anything else, in which case the
4193 information that the values are distinct is encoded
4194 in the IL. */
4195 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4196 /* Only perform the following when being called from PRE
4197 which embeds tail merging. */
4198 && default_vn_walk_kind == VN_WALK)))
4199 changed = visit_reference_op_call (lhs, call_stmt);
4200 else
4201 changed = defs_to_varying (call_stmt);
4203 else
4204 changed = defs_to_varying (stmt);
4205 done:
4206 return changed;
4209 /* Compare two operands by reverse postorder index */
4211 static int
4212 compare_ops (const void *pa, const void *pb)
4214 const tree opa = *((const tree *)pa);
4215 const tree opb = *((const tree *)pb);
4216 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4217 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4218 basic_block bba;
4219 basic_block bbb;
4221 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4222 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4223 else if (gimple_nop_p (opstmta))
4224 return -1;
4225 else if (gimple_nop_p (opstmtb))
4226 return 1;
4228 bba = gimple_bb (opstmta);
4229 bbb = gimple_bb (opstmtb);
4231 if (!bba && !bbb)
4232 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4233 else if (!bba)
4234 return -1;
4235 else if (!bbb)
4236 return 1;
4238 if (bba == bbb)
4240 if (gimple_code (opstmta) == GIMPLE_PHI
4241 && gimple_code (opstmtb) == GIMPLE_PHI)
4242 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4243 else if (gimple_code (opstmta) == GIMPLE_PHI)
4244 return -1;
4245 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4246 return 1;
4247 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4248 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4249 else
4250 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4252 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4255 /* Sort an array containing members of a strongly connected component
4256 SCC so that the members are ordered by RPO number.
4257 This means that when the sort is complete, iterating through the
4258 array will give you the members in RPO order. */
4260 static void
4261 sort_scc (vec<tree> scc)
4263 scc.qsort (compare_ops);
4266 /* Insert the no longer used nary ONARY to the hash INFO. */
4268 static void
4269 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4271 size_t size = sizeof_vn_nary_op (onary->length);
4272 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4273 &info->nary_obstack);
4274 memcpy (nary, onary, size);
4275 vn_nary_op_insert_into (nary, info->nary, false);
4278 /* Insert the no longer used phi OPHI to the hash INFO. */
4280 static void
4281 copy_phi (vn_phi_t ophi, vn_tables_t info)
4283 vn_phi_t phi = info->phis_pool->allocate ();
4284 vn_phi_s **slot;
4285 memcpy (phi, ophi, sizeof (*phi));
4286 ophi->phiargs.create (0);
4287 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4288 gcc_assert (!*slot);
4289 *slot = phi;
4292 /* Insert the no longer used reference OREF to the hash INFO. */
4294 static void
4295 copy_reference (vn_reference_t oref, vn_tables_t info)
4297 vn_reference_t ref;
4298 vn_reference_s **slot;
4299 ref = info->references_pool->allocate ();
4300 memcpy (ref, oref, sizeof (*ref));
4301 oref->operands.create (0);
4302 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4303 if (*slot)
4304 free_reference (*slot);
4305 *slot = ref;
4308 /* Process a strongly connected component in the SSA graph. */
4310 static void
4311 process_scc (vec<tree> scc)
4313 tree var;
4314 unsigned int i;
4315 unsigned int iterations = 0;
4316 bool changed = true;
4317 vn_nary_op_iterator_type hin;
4318 vn_phi_iterator_type hip;
4319 vn_reference_iterator_type hir;
4320 vn_nary_op_t nary;
4321 vn_phi_t phi;
4322 vn_reference_t ref;
4324 /* If the SCC has a single member, just visit it. */
4325 if (scc.length () == 1)
4327 tree use = scc[0];
4328 if (VN_INFO (use)->use_processed)
4329 return;
4330 /* We need to make sure it doesn't form a cycle itself, which can
4331 happen for self-referential PHI nodes. In that case we would
4332 end up inserting an expression with VN_TOP operands into the
4333 valid table which makes us derive bogus equivalences later.
4334 The cheapest way to check this is to assume it for all PHI nodes. */
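/* E.g. a self-referential PHI such as i_1 = PHI <0(2), i_1(3)> has to
   be iterated so that no expression with a VN_TOP operand ends up in
   the valid table (illustrative example, not from the sources).  */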
4335 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4336 /* Fallthru to iteration. */ ;
4337 else
4339 visit_use (use);
4340 return;
4344 if (dump_file && (dump_flags & TDF_DETAILS))
4345 print_scc (dump_file, scc);
4347 /* Iterate over the SCC with the optimistic table until it stops
4348 changing. */
4349 current_info = optimistic_info;
4350 while (changed)
4352 changed = false;
4353 iterations++;
4354 if (dump_file && (dump_flags & TDF_DETAILS))
4355 fprintf (dump_file, "Starting iteration %d\n", iterations);
4356 /* As we are value-numbering optimistically we have to
4357 clear the expression tables and the simplified expressions
4358 in each iteration until we converge. */
4359 optimistic_info->nary->empty ();
4360 optimistic_info->phis->empty ();
4361 optimistic_info->references->empty ();
4362 obstack_free (&optimistic_info->nary_obstack, NULL);
4363 gcc_obstack_init (&optimistic_info->nary_obstack);
4364 optimistic_info->phis_pool->release ();
4365 optimistic_info->references_pool->release ();
4366 FOR_EACH_VEC_ELT (scc, i, var)
4367 gcc_assert (!VN_INFO (var)->needs_insertion
4368 && VN_INFO (var)->expr == NULL);
4369 FOR_EACH_VEC_ELT (scc, i, var)
4370 changed |= visit_use (var);
4373 if (dump_file && (dump_flags & TDF_DETAILS))
4374 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4375 statistics_histogram_event (cfun, "SCC iterations", iterations);
4377 /* Finally, copy the contents of the no longer used optimistic
4378 table to the valid table. */
4379 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4380 copy_nary (nary, valid_info);
4381 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4382 copy_phi (phi, valid_info);
4383 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4384 ref, vn_reference_t, hir)
4385 copy_reference (ref, valid_info);
4387 current_info = valid_info;
4391 /* Pop the components of the found SCC for NAME off the SCC stack
4392 and process them. SCCs larger than PARAM_SCCVN_MAX_SCC_SIZE are
4393 not value-numbered; their defs are dropped to varying instead. */
4395 static void
4396 extract_and_process_scc_for_name (tree name)
4398 auto_vec<tree> scc;
4399 tree x;
4401 /* Found an SCC, pop the components off the SCC stack and
4402 process them. */
4405 x = sccstack.pop ();
4407 VN_INFO (x)->on_sccstack = false;
4408 scc.safe_push (x);
4409 } while (x != name);
4411 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4412 incredibly large.
4413 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4414 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4416 if (dump_file)
4418 print_scc (dump_file, scc);
4419 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4420 "size %u exceeding %u\n", scc.length (),
4421 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4423 tree var;
4424 unsigned i;
4425 FOR_EACH_VEC_ELT (scc, i, var)
4427 gimple *def = SSA_NAME_DEF_STMT (var);
4428 mark_use_processed (var);
4429 if (SSA_NAME_IS_DEFAULT_DEF (var)
4430 || gimple_code (def) == GIMPLE_PHI)
4431 set_ssa_val_to (var, var);
4432 else
4433 defs_to_varying (def);
4435 return;
4438 if (scc.length () > 1)
4439 sort_scc (scc);
4441 process_scc (scc);
4444 /* Depth first search on NAME to discover and process SCC's in the SSA
4445 graph.
4446 Execution of this algorithm relies on the fact that the SCC's are
4447 popped off the stack in topological order. */
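/* Roughly, this is an iterative formulation of Tarjan's SCC algorithm
   walking SSA use-def chains, with the recursion replaced by the
   explicit namevec/itervec stacks.  */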
4451 static void
4452 DFS (tree name)
4454 auto_vec<ssa_op_iter> itervec;
4455 auto_vec<tree> namevec;
4456 use_operand_p usep = NULL;
4457 gimple *defstmt;
4458 tree use;
4459 ssa_op_iter iter;
4461 start_over:
4462 /* SCC info */
4463 VN_INFO (name)->dfsnum = next_dfs_num++;
4464 VN_INFO (name)->visited = true;
4465 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4467 sccstack.safe_push (name);
4468 VN_INFO (name)->on_sccstack = true;
4469 defstmt = SSA_NAME_DEF_STMT (name);
4471 /* Recursively DFS on our operands, looking for SCC's. */
4472 if (!gimple_nop_p (defstmt))
4474 /* Push a new iterator. */
4475 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4476 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4477 else
4478 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4480 else
4481 clear_and_done_ssa_iter (&iter);
4483 while (1)
4485 /* If we are done processing uses of a name, go up the stack
4486 of iterators and process SCCs as we found them. */
4487 if (op_iter_done (&iter))
4489 /* See if we found an SCC. */
4490 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4491 extract_and_process_scc_for_name (name);
4493 /* Check if we are done. */
4494 if (namevec.is_empty ())
4495 return;
4497 /* Restore the last use walker and continue walking there. */
4498 use = name;
4499 name = namevec.pop ();
4500 memcpy (&iter, &itervec.last (),
4501 sizeof (ssa_op_iter));
4502 itervec.pop ();
4503 goto continue_walking;
4506 use = USE_FROM_PTR (usep);
4508 /* Since we handle phi nodes, we will sometimes get
4509 invariants in the use expression. */
4510 if (TREE_CODE (use) == SSA_NAME)
4512 if (! (VN_INFO (use)->visited))
4514 /* Recurse by pushing the current use walking state on
4515 the stack and starting over. */
4516 itervec.safe_push (iter);
4517 namevec.safe_push (name);
4518 name = use;
4519 goto start_over;
4521 continue_walking:
4522 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4523 VN_INFO (use)->low);
4525 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4526 && VN_INFO (use)->on_sccstack)
4528 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4529 VN_INFO (name)->low);
4533 usep = op_iter_next_use (&iter);
4537 /* Allocate a value number table. */
4539 static void
4540 allocate_vn_table (vn_tables_t table)
4542 table->phis = new vn_phi_table_type (23);
4543 table->nary = new vn_nary_op_table_type (23);
4544 table->references = new vn_reference_table_type (23);
4546 gcc_obstack_init (&table->nary_obstack);
4547 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4548 table->references_pool = new object_allocator<vn_reference_s>
4549 ("VN references");
4552 /* Free a value number table. */
4554 static void
4555 free_vn_table (vn_tables_t table)
4557 delete table->phis;
4558 table->phis = NULL;
4559 delete table->nary;
4560 table->nary = NULL;
4561 delete table->references;
4562 table->references = NULL;
4563 obstack_free (&table->nary_obstack, NULL);
4564 delete table->phis_pool;
4565 delete table->references_pool;
4568 static void
4569 init_scc_vn (void)
4571 int j;
4572 int *rpo_numbers_temp;
4574 calculate_dominance_info (CDI_DOMINATORS);
4575 mark_dfs_back_edges ();
4577 sccstack.create (0);
4578 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4580 constant_value_ids = BITMAP_ALLOC (NULL);
4582 next_dfs_num = 1;
4583 next_value_id = 1;
4585 vn_ssa_aux_table.create (num_ssa_names + 1);
4586 /* VEC_alloc doesn't actually grow it to the right size, it just
4587 preallocates the space to do so. */
4588 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4589 gcc_obstack_init (&vn_ssa_aux_obstack);
4591 shared_lookup_phiargs.create (0);
4592 shared_lookup_references.create (0);
4593 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4594 rpo_numbers_temp =
4595 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4596 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4598 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4599 the i'th block in RPO order is bb. We want to map bb's to RPO
4600 numbers, so we need to rearrange this array. */
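/* For example (illustration only), if rpo_numbers_temp is {5, 3, 4}
   then after the loop below rpo_numbers[5] == 0, rpo_numbers[3] == 1
   and rpo_numbers[4] == 2.  */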
4601 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4602 rpo_numbers[rpo_numbers_temp[j]] = j;
4604 XDELETE (rpo_numbers_temp);
4606 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4607 get_identifier ("VN_TOP"), error_mark_node);
4609 renumber_gimple_stmt_uids ();
4611 /* Create the valid and optimistic value numbering tables. */
4612 valid_info = XCNEW (struct vn_tables_s);
4613 allocate_vn_table (valid_info);
4614 optimistic_info = XCNEW (struct vn_tables_s);
4615 allocate_vn_table (optimistic_info);
4616 current_info = valid_info;
4618 /* Create the VN_INFO structures, and initialize value numbers to
4619 TOP or VARYING for parameters. */
4620 size_t i;
4621 tree name;
4623 FOR_EACH_SSA_NAME (i, name, cfun)
4625 VN_INFO_GET (name)->valnum = VN_TOP;
4626 VN_INFO (name)->needs_insertion = false;
4627 VN_INFO (name)->expr = NULL;
4628 VN_INFO (name)->value_id = 0;
4630 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4631 continue;
4633 switch (TREE_CODE (SSA_NAME_VAR (name)))
4635 case VAR_DECL:
4636 /* All undefined vars are VARYING. */
4637 VN_INFO (name)->valnum = name;
4638 VN_INFO (name)->visited = true;
4639 break;
4641 case PARM_DECL:
4642 /* Parameters are VARYING but we can record a condition
4643 if we know it is a non-NULL pointer. */
4644 VN_INFO (name)->visited = true;
4645 VN_INFO (name)->valnum = name;
4646 if (POINTER_TYPE_P (TREE_TYPE (name))
4647 && nonnull_arg_p (SSA_NAME_VAR (name)))
4649 tree ops[2];
4650 ops[0] = name;
4651 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4652 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4653 boolean_true_node, 0);
4654 if (dump_file && (dump_flags & TDF_DETAILS))
4656 fprintf (dump_file, "Recording ");
4657 print_generic_expr (dump_file, name, TDF_SLIM);
4658 fprintf (dump_file, " != 0\n");
4661 break;
4663 case RESULT_DECL:
4664 /* If the result is passed by invisible reference the default
4665 def is initialized, otherwise it's uninitialized. Still,
4666 undefined is varying. */
4667 VN_INFO (name)->visited = true;
4668 VN_INFO (name)->valnum = name;
4669 break;
4671 default:
4672 gcc_unreachable ();
4677 /* Restore SSA info that has been reset on value leaders. */
4679 void
4680 scc_vn_restore_ssa_info (void)
4682 unsigned i;
4683 tree name;
4685 FOR_EACH_SSA_NAME (i, name, cfun)
4687 if (has_VN_INFO (name))
4689 if (VN_INFO (name)->needs_insertion)
4691 else if (POINTER_TYPE_P (TREE_TYPE (name))
4692 && VN_INFO (name)->info.ptr_info)
4693 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4694 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4695 && VN_INFO (name)->info.range_info)
4697 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4698 SSA_NAME_ANTI_RANGE_P (name)
4699 = VN_INFO (name)->range_info_anti_range_p;
4705 void
4706 free_scc_vn (void)
4708 size_t i;
4709 tree name;
4711 delete constant_to_value_id;
4712 constant_to_value_id = NULL;
4713 BITMAP_FREE (constant_value_ids);
4714 shared_lookup_phiargs.release ();
4715 shared_lookup_references.release ();
4716 XDELETEVEC (rpo_numbers);
4718 FOR_EACH_SSA_NAME (i, name, cfun)
4720 if (has_VN_INFO (name)
4721 && VN_INFO (name)->needs_insertion)
4722 release_ssa_name (name);
4724 obstack_free (&vn_ssa_aux_obstack, NULL);
4725 vn_ssa_aux_table.release ();
4727 sccstack.release ();
4728 free_vn_table (valid_info);
4729 XDELETE (valid_info);
4730 free_vn_table (optimistic_info);
4731 XDELETE (optimistic_info);
4733 BITMAP_FREE (const_parms);
4736 /* Set *ID according to RESULT. */
4738 static void
4739 set_value_id_for_result (tree result, unsigned int *id)
4741 if (result && TREE_CODE (result) == SSA_NAME)
4742 *id = VN_INFO (result)->value_id;
4743 else if (result && is_gimple_min_invariant (result))
4744 *id = get_or_alloc_constant_value_id (result);
4745 else
4746 *id = get_next_value_id ();
4749 /* Set the value ids in the valid hash tables. */
4751 static void
4752 set_hashtable_value_ids (void)
4754 vn_nary_op_iterator_type hin;
4755 vn_phi_iterator_type hip;
4756 vn_reference_iterator_type hir;
4757 vn_nary_op_t vno;
4758 vn_reference_t vr;
4759 vn_phi_t vp;
4761 /* Now set the value ids of the things we had put in the hash
4762 table. */
4764 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4765 set_value_id_for_result (vno->result, &vno->value_id);
4767 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4768 set_value_id_for_result (vp->result, &vp->value_id);
4770 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4771 hir)
4772 set_value_id_for_result (vr->result, &vr->value_id);
4775 class sccvn_dom_walker : public dom_walker
4777 public:
4778 sccvn_dom_walker ()
4779 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4781 virtual edge before_dom_children (basic_block);
4782 virtual void after_dom_children (basic_block);
4784 void record_cond (basic_block,
4785 enum tree_code code, tree lhs, tree rhs, bool value);
4786 void record_conds (basic_block,
4787 enum tree_code code, tree lhs, tree rhs, bool value);
4789 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4790 cond_stack;
4793 /* Record a temporary condition for the BB and its dominated blocks. */
4795 void
4796 sccvn_dom_walker::record_cond (basic_block bb,
4797 enum tree_code code, tree lhs, tree rhs,
4798 bool value)
4800 tree ops[2] = { lhs, rhs };
4801 vn_nary_op_t old = NULL;
4802 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4803 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4804 vn_nary_op_t cond
4805 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4806 value
4807 ? boolean_true_node
4808 : boolean_false_node, 0);
4809 if (dump_file && (dump_flags & TDF_DETAILS))
4811 fprintf (dump_file, "Recording temporarily ");
4812 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4813 fprintf (dump_file, " %s ", get_tree_code_name (code));
4814 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4815 fprintf (dump_file, " == %s%s\n",
4816 value ? "true" : "false",
4817 old ? " (old entry saved)" : "");
4819 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4822 /* Record temporary conditions for the BB and its dominated blocks
4823 according to LHS CODE RHS == VALUE and its dominated conditions. */
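/* For instance (illustrative), entering the true edge of a < b also
   records a <= b == true, a != b == true and a == b == false, as the
   switch below does.  */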
4825 void
4826 sccvn_dom_walker::record_conds (basic_block bb,
4827 enum tree_code code, tree lhs, tree rhs,
4828 bool value)
4830 /* Record the original condition. */
4831 record_cond (bb, code, lhs, rhs, value);
4833 if (!value)
4834 return;
4836 /* Record dominated conditions if the condition is true. Note that
4837 the inversion is already recorded. */
4838 switch (code)
4840 case LT_EXPR:
4841 case GT_EXPR:
4842 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4843 record_cond (bb, NE_EXPR, lhs, rhs, true);
4844 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4845 break;
4847 case EQ_EXPR:
4848 record_cond (bb, LE_EXPR, lhs, rhs, true);
4849 record_cond (bb, GE_EXPR, lhs, rhs, true);
4850 record_cond (bb, LT_EXPR, lhs, rhs, false);
4851 record_cond (bb, GT_EXPR, lhs, rhs, false);
4852 break;
4854 default:
4855 break;
4859 /* Restore expressions and values derived from conditionals. */
4861 void
4862 sccvn_dom_walker::after_dom_children (basic_block bb)
4864 while (!cond_stack.is_empty ()
4865 && cond_stack.last ().first == bb)
4867 vn_nary_op_t cond = cond_stack.last ().second.first;
4868 vn_nary_op_t old = cond_stack.last ().second.second;
4869 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4870 if (old)
4871 vn_nary_op_insert_into (old, current_info->nary, false);
4872 cond_stack.pop ();
4876 /* Value number all statements in BB. */
4878 edge
4879 sccvn_dom_walker::before_dom_children (basic_block bb)
4881 if (dump_file && (dump_flags & TDF_DETAILS))
4882 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4884 /* If we have a single predecessor record the equivalence from a
4885 possible condition on the predecessor edge. */
4886 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4887 if (pred_e)
4889 /* Check if there are multiple executable successor edges in
4890 the source block. Otherwise there is no additional info
4891 to be recorded. */
4892 edge_iterator ei;
4893 edge e2;
4894 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4895 if (e2 != pred_e
4896 && e2->flags & EDGE_EXECUTABLE)
4897 break;
4898 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4900 gimple *stmt = last_stmt (pred_e->src);
4901 if (stmt
4902 && gimple_code (stmt) == GIMPLE_COND)
4904 enum tree_code code = gimple_cond_code (stmt);
4905 tree lhs = gimple_cond_lhs (stmt);
4906 tree rhs = gimple_cond_rhs (stmt);
4907 record_conds (bb, code, lhs, rhs,
4908 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4909 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4910 if (code != ERROR_MARK)
4911 record_conds (bb, code, lhs, rhs,
4912 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4917 /* Value-number all defs in the basic-block. */
4918 for (gphi_iterator gsi = gsi_start_phis (bb);
4919 !gsi_end_p (gsi); gsi_next (&gsi))
4921 gphi *phi = gsi.phi ();
4922 tree res = PHI_RESULT (phi);
4923 if (!VN_INFO (res)->visited)
4924 DFS (res);
4926 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4927 !gsi_end_p (gsi); gsi_next (&gsi))
4929 ssa_op_iter i;
4930 tree op;
4931 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4932 if (!VN_INFO (op)->visited)
4933 DFS (op);
4936 /* Finally look at the last stmt. */
4937 gimple *stmt = last_stmt (bb);
4938 if (!stmt)
4939 return NULL;
4941 enum gimple_code code = gimple_code (stmt);
4942 if (code != GIMPLE_COND
4943 && code != GIMPLE_SWITCH
4944 && code != GIMPLE_GOTO)
4945 return NULL;
4947 if (dump_file && (dump_flags & TDF_DETAILS))
4949 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4950 print_gimple_stmt (dump_file, stmt, 0);
4953 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4954 if value-numbering can prove they are not reachable. Handling
4955 computed gotos is also possible. */
4956 tree val;
4957 switch (code)
4959 case GIMPLE_COND:
4961 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4962 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4963 val = gimple_simplify (gimple_cond_code (stmt),
4964 boolean_type_node, lhs, rhs,
4965 NULL, vn_valueize);
4966 /* If that didn't simplify to a constant see if we have recorded
4967 temporary expressions from taken edges. */
4968 if (!val || TREE_CODE (val) != INTEGER_CST)
4970 tree ops[2];
4971 ops[0] = lhs;
4972 ops[1] = rhs;
4973 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4974 boolean_type_node, ops, NULL);
4976 break;
4978 case GIMPLE_SWITCH:
4979 val = gimple_switch_index (as_a <gswitch *> (stmt));
4980 break;
4981 case GIMPLE_GOTO:
4982 val = gimple_goto_dest (stmt);
4983 break;
4984 default:
4985 gcc_unreachable ();
4987 if (!val)
4988 return NULL;
4990 edge taken = find_taken_edge (bb, vn_valueize (val));
4991 if (!taken)
4992 return NULL;
4994 if (dump_file && (dump_flags & TDF_DETAILS))
4995 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4996 "not executable\n", bb->index, bb->index, taken->dest->index);
4998 return taken;
5001 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5002 oracle walking during the VN process. */
5005 void
5006 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5008 size_t i;
5010 default_vn_walk_kind = default_vn_walk_kind_;
5012 init_scc_vn ();
5014 /* Collect pointers we know point to readonly memory. */
5015 const_parms = BITMAP_ALLOC (NULL);
5016 tree fnspec = lookup_attribute ("fn spec",
5017 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5018 if (fnspec)
5020 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5021 i = 1;
5022 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5023 arg; arg = DECL_CHAIN (arg), ++i)
5025 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5026 break;
5027 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5028 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5030 tree name = ssa_default_def (cfun, arg);
5031 if (name)
5032 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5037 /* Walk all blocks in dominator order, value-numbering stmts
5038 SSA defs and decide whether outgoing edges are not executable. */
5039 sccvn_dom_walker walker;
5040 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5042 /* Initialize the value ids and prune out remaining VN_TOPs
5043 from dead code. */
5044 tree name;
5045 FOR_EACH_SSA_NAME (i, name, cfun)
5047 vn_ssa_aux_t info = VN_INFO (name);
5048 if (!info->visited
5049 || info->valnum == VN_TOP)
5050 info->valnum = name;
5051 if (info->valnum == name)
5052 info->value_id = get_next_value_id ();
5053 else if (is_gimple_min_invariant (info->valnum))
5054 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5057 /* Propagate. */
5058 FOR_EACH_SSA_NAME (i, name, cfun)
5060 vn_ssa_aux_t info = VN_INFO (name);
5061 if (TREE_CODE (info->valnum) == SSA_NAME
5062 && info->valnum != name
5063 && info->value_id != VN_INFO (info->valnum)->value_id)
5064 info->value_id = VN_INFO (info->valnum)->value_id;
5067 set_hashtable_value_ids ();
5069 if (dump_file && (dump_flags & TDF_DETAILS))
5071 fprintf (dump_file, "Value numbers:\n");
5072 FOR_EACH_SSA_NAME (i, name, cfun)
5074 if (VN_INFO (name)->visited
5075 && SSA_VAL (name) != name)
5077 print_generic_expr (dump_file, name);
5078 fprintf (dump_file, " = ");
5079 print_generic_expr (dump_file, SSA_VAL (name));
5080 fprintf (dump_file, "\n");
5086 /* Return the maximum value id we have ever seen. */
5088 unsigned int
5089 get_max_value_id (void)
5091 return next_value_id;
5094 /* Return the next unique value id. */
5096 unsigned int
5097 get_next_value_id (void)
5099 return next_value_id++;
5103 /* Compare two expressions E1 and E2 and return true if they are equal. */
5105 bool
5106 expressions_equal_p (tree e1, tree e2)
5108 /* The obvious case. */
5109 if (e1 == e2)
5110 return true;
5112 /* If either one is VN_TOP consider them equal. */
5113 if (e1 == VN_TOP || e2 == VN_TOP)
5114 return true;
5116 /* If only one of them is null, they cannot be equal. */
5117 if (!e1 || !e2)
5118 return false;
5120 /* Now perform the actual comparison. */
5121 if (TREE_CODE (e1) == TREE_CODE (e2)
5122 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5123 return true;
5125 return false;
5129 /* Return true if the nary operation NARY may trap. This is a copy
5130 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5132 bool
5133 vn_nary_may_trap (vn_nary_op_t nary)
5135 tree type;
5136 tree rhs2 = NULL_TREE;
5137 bool honor_nans = false;
5138 bool honor_snans = false;
5139 bool fp_operation = false;
5140 bool honor_trapv = false;
5141 bool handled, ret;
5142 unsigned i;
5144 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5145 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5146 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5148 type = nary->type;
5149 fp_operation = FLOAT_TYPE_P (type);
5150 if (fp_operation)
5152 honor_nans = flag_trapping_math && !flag_finite_math_only;
5153 honor_snans = flag_signaling_nans != 0;
5155 else if (INTEGRAL_TYPE_P (type)
5156 && TYPE_OVERFLOW_TRAPS (type))
5157 honor_trapv = true;
5159 if (nary->length >= 2)
5160 rhs2 = nary->op[1];
5161 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5162 honor_trapv,
5163 honor_nans, honor_snans, rhs2,
5164 &handled);
5165 if (handled
5166 && ret)
5167 return true;
5169 for (i = 0; i < nary->length; ++i)
5170 if (tree_could_trap_p (nary->op[i]))
5171 return true;
5173 return false;
5177 class eliminate_dom_walker : public dom_walker
5179 public:
5180 eliminate_dom_walker (cdi_direction, bitmap);
5181 ~eliminate_dom_walker ();
5183 virtual edge before_dom_children (basic_block);
5184 virtual void after_dom_children (basic_block);
5186 tree eliminate_avail (tree op);
5187 void eliminate_push_avail (tree op);
5188 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5190 bool do_pre;
5191 unsigned int el_todo;
5192 unsigned int eliminations;
5193 unsigned int insertions;
5195 /* SSA names that had their defs inserted by PRE if do_pre. */
5196 bitmap inserted_exprs;
5198 /* Blocks with statements that have had their EH properties changed. */
5199 bitmap need_eh_cleanup;
5201 /* Blocks with statements that have had their AB properties changed. */
5202 bitmap need_ab_cleanup;
5204 auto_vec<gimple *> to_remove;
5205 auto_vec<gimple *> to_fixup;
5206 auto_vec<tree> avail;
5207 auto_vec<tree> avail_stack;
5210 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5211 bitmap inserted_exprs_)
5212 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5213 el_todo (0), eliminations (0), insertions (0),
5214 inserted_exprs (inserted_exprs_)
5216 need_eh_cleanup = BITMAP_ALLOC (NULL);
5217 need_ab_cleanup = BITMAP_ALLOC (NULL);
5220 eliminate_dom_walker::~eliminate_dom_walker ()
5222 BITMAP_FREE (need_eh_cleanup);
5223 BITMAP_FREE (need_ab_cleanup);
5226 /* Return a leader for OP that is available at the current point of the
5227 eliminate domwalk. */
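/* Note: AVAIL is indexed by the SSA version of the value number and
   AVAIL_STACK records the pushes so they can be undone once the
   domwalk leaves the block that made the names available.  */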
5229 tree
5230 eliminate_dom_walker::eliminate_avail (tree op)
5232 tree valnum = VN_INFO (op)->valnum;
5233 if (TREE_CODE (valnum) == SSA_NAME)
5235 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5236 return valnum;
5237 if (avail.length () > SSA_NAME_VERSION (valnum))
5238 return avail[SSA_NAME_VERSION (valnum)];
5240 else if (is_gimple_min_invariant (valnum))
5241 return valnum;
5242 return NULL_TREE;
5245 /* At the current point of the eliminate domwalk make OP available. */
5247 void
5248 eliminate_dom_walker::eliminate_push_avail (tree op)
5250 tree valnum = VN_INFO (op)->valnum;
5251 if (TREE_CODE (valnum) == SSA_NAME)
5253 if (avail.length () <= SSA_NAME_VERSION (valnum))
5254 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5255 tree pushop = op;
5256 if (avail[SSA_NAME_VERSION (valnum)])
5257 pushop = avail[SSA_NAME_VERSION (valnum)];
5258 avail_stack.safe_push (pushop);
5259 avail[SSA_NAME_VERSION (valnum)] = op;
5263 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5264 the leader for the expression if insertion was successful. */
5266 tree
5267 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5269 /* We can insert a sequence with a single assignment only. */
5270 gimple_seq stmts = VN_INFO (val)->expr;
5271 if (!gimple_seq_singleton_p (stmts))
5272 return NULL_TREE;
5273 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5274 if (!stmt
5275 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5276 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5277 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5278 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5279 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5280 return NULL_TREE;
5282 tree op = gimple_assign_rhs1 (stmt);
5283 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5284 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5285 op = TREE_OPERAND (op, 0);
5286 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5287 if (!leader)
5288 return NULL_TREE;
5290 tree res;
5291 stmts = NULL;
5292 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5293 res = gimple_build (&stmts, BIT_FIELD_REF,
5294 TREE_TYPE (val), leader,
5295 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5296 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5297 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5298 res = gimple_build (&stmts, BIT_AND_EXPR,
5299 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5300 else
5301 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5302 TREE_TYPE (val), leader);
5303 if (TREE_CODE (res) != SSA_NAME
5304 || SSA_NAME_IS_DEFAULT_DEF (res)
5305 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5307 gimple_seq_discard (stmts);
5309 /* During propagation we have to treat SSA info conservatively
5310 and thus we can end up simplifying the inserted expression
5311 at elimination time to something not defined in stmts. */
5312 /* But then this is a redundancy we failed to detect. Which means
5313 res now has two values. That doesn't play well with how
5314 we track availability here, so give up. */
5315 if (dump_file && (dump_flags & TDF_DETAILS))
5317 if (TREE_CODE (res) == SSA_NAME)
5318 res = eliminate_avail (res);
5319 if (res)
5321 fprintf (dump_file, "Failed to insert expression for value ");
5322 print_generic_expr (dump_file, val);
5323 fprintf (dump_file, " which is really fully redundant to ");
5324 print_generic_expr (dump_file, res);
5325 fprintf (dump_file, "\n");
5329 return NULL_TREE;
5331 else
5333 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5334 VN_INFO_GET (res)->valnum = val;
5337 insertions++;
5338 if (dump_file && (dump_flags & TDF_DETAILS))
5340 fprintf (dump_file, "Inserted ");
5341 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5344 return res;
5349 /* Perform elimination for the basic-block B during the domwalk. */
5351 edge
5352 eliminate_dom_walker::before_dom_children (basic_block b)
5354 /* Mark new bb. */
5355 avail_stack.safe_push (NULL_TREE);
5357 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5358 edge_iterator ei;
5359 edge e;
5360 FOR_EACH_EDGE (e, ei, b->preds)
5361 if (e->flags & EDGE_EXECUTABLE)
5362 break;
5363 if (! e)
5364 return NULL;
5366 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5368 gphi *phi = gsi.phi ();
5369 tree res = PHI_RESULT (phi);
5371 if (virtual_operand_p (res))
5373 gsi_next (&gsi);
5374 continue;
5377 tree sprime = eliminate_avail (res);
5378 if (sprime
5379 && sprime != res)
5381 if (dump_file && (dump_flags & TDF_DETAILS))
5383 fprintf (dump_file, "Replaced redundant PHI node defining ");
5384 print_generic_expr (dump_file, res);
5385 fprintf (dump_file, " with ");
5386 print_generic_expr (dump_file, sprime);
5387 fprintf (dump_file, "\n");
5390 /* If we inserted this PHI node ourself, it's not an elimination. */
5391 if (! inserted_exprs
5392 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5393 eliminations++;
5395 /* If we will propagate into all uses don't bother to do
5396 anything. */
5397 if (may_propagate_copy (res, sprime))
5399 /* Mark the PHI for removal. */
5400 to_remove.safe_push (phi);
5401 gsi_next (&gsi);
5402 continue;
5405 remove_phi_node (&gsi, false);
5407 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5408 sprime = fold_convert (TREE_TYPE (res), sprime);
5409 gimple *stmt = gimple_build_assign (res, sprime);
5410 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5411 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5412 continue;
5415 eliminate_push_avail (res);
5416 gsi_next (&gsi);
5419 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5420 !gsi_end_p (gsi);
5421 gsi_next (&gsi))
5423 tree sprime = NULL_TREE;
5424 gimple *stmt = gsi_stmt (gsi);
5425 tree lhs = gimple_get_lhs (stmt);
5426 if (lhs && TREE_CODE (lhs) == SSA_NAME
5427 && !gimple_has_volatile_ops (stmt)
5428 /* See PR43491. Do not replace a global register variable when
5429 it is the RHS of an assignment. Do replace local register
5430 variables since gcc does not guarantee a local variable will
5431 be allocated in a register.
5432 ??? The fix isn't effective here. This should instead
5433 be ensured by not value-numbering them the same but treating
5434 them like volatiles? */
5435 && !(gimple_assign_single_p (stmt)
5436 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5437 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5438 && is_global_var (gimple_assign_rhs1 (stmt)))))
5440 sprime = eliminate_avail (lhs);
5441 if (!sprime)
5443 /* If there is no existing usable leader but SCCVN thinks
5444 it has an expression it wants to use as replacement,
5445 insert that. */
5446 tree val = VN_INFO (lhs)->valnum;
5447 if (val != VN_TOP
5448 && TREE_CODE (val) == SSA_NAME
5449 && VN_INFO (val)->needs_insertion
5450 && VN_INFO (val)->expr != NULL
5451 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5452 eliminate_push_avail (sprime);
5455 /* If this now constitutes a copy, duplicate points-to
5456 and range info appropriately. This is especially
5457 important for inserted code. See tree-ssa-copy.c
5458 for similar code. */
5459 if (sprime
5460 && TREE_CODE (sprime) == SSA_NAME)
5462 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5463 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5464 && VN_INFO_PTR_INFO (lhs)
5465 && ! VN_INFO_PTR_INFO (sprime))
5467 duplicate_ssa_name_ptr_info (sprime,
5468 VN_INFO_PTR_INFO (lhs));
5469 if (b != sprime_b)
5470 mark_ptr_info_alignment_unknown
5471 (SSA_NAME_PTR_INFO (sprime));
5473 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5474 && VN_INFO_RANGE_INFO (lhs)
5475 && ! VN_INFO_RANGE_INFO (sprime)
5476 && b == sprime_b)
5477 duplicate_ssa_name_range_info (sprime,
5478 VN_INFO_RANGE_TYPE (lhs),
5479 VN_INFO_RANGE_INFO (lhs));
5482 /* Inhibit the use of an inserted PHI on a loop header when
5483 the address of the memory reference is a simple induction
5484 variable. In other cases the vectorizer won't do anything
5485 anyway (either it's loop invariant or a complicated
5486 expression). */
5487 if (sprime
5488 && TREE_CODE (sprime) == SSA_NAME
5489 && do_pre
5490 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5491 && loop_outer (b->loop_father)
5492 && has_zero_uses (sprime)
5493 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5494 && gimple_assign_load_p (stmt))
5496 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5497 basic_block def_bb = gimple_bb (def_stmt);
5498 if (gimple_code (def_stmt) == GIMPLE_PHI
5499 && def_bb->loop_father->header == def_bb)
5501 loop_p loop = def_bb->loop_father;
5502 ssa_op_iter iter;
5503 tree op;
5504 bool found = false;
5505 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5507 affine_iv iv;
5508 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5509 if (def_bb
5510 && flow_bb_inside_loop_p (loop, def_bb)
5511 && simple_iv (loop, loop, op, &iv, true))
5513 found = true;
5514 break;
5517 if (found)
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5521 fprintf (dump_file, "Not replacing ");
5522 print_gimple_expr (dump_file, stmt, 0);
5523 fprintf (dump_file, " with ");
5524 print_generic_expr (dump_file, sprime);
5525 fprintf (dump_file, " which would add a loop"
5526 " carried dependence to loop %d\n",
5527 loop->num);
5529 /* Don't keep sprime available. */
5530 sprime = NULL_TREE;
5535 if (sprime)
5537 /* If we can propagate the value computed for LHS into
5538 all uses don't bother doing anything with this stmt. */
5539 if (may_propagate_copy (lhs, sprime))
5541 /* Mark it for removal. */
5542 to_remove.safe_push (stmt);
5544 /* ??? Don't count copy/constant propagations. */
5545 if (gimple_assign_single_p (stmt)
5546 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5547 || gimple_assign_rhs1 (stmt) == sprime))
5548 continue;
5550 if (dump_file && (dump_flags & TDF_DETAILS))
5552 fprintf (dump_file, "Replaced ");
5553 print_gimple_expr (dump_file, stmt, 0);
5554 fprintf (dump_file, " with ");
5555 print_generic_expr (dump_file, sprime);
5556 fprintf (dump_file, " in all uses of ");
5557 print_gimple_stmt (dump_file, stmt, 0);
5560 eliminations++;
5561 continue;
5564 /* If this is an assignment from our leader (which
5565 happens in the case the value-number is a constant)
5566 then there is nothing to do. */
5567 if (gimple_assign_single_p (stmt)
5568 && sprime == gimple_assign_rhs1 (stmt))
5569 continue;
5571 /* Else replace its RHS. */
5572 bool can_make_abnormal_goto
5573 = is_gimple_call (stmt)
5574 && stmt_can_make_abnormal_goto (stmt);
5576 if (dump_file && (dump_flags & TDF_DETAILS))
5578 fprintf (dump_file, "Replaced ");
5579 print_gimple_expr (dump_file, stmt, 0);
5580 fprintf (dump_file, " with ");
5581 print_generic_expr (dump_file, sprime);
5582 fprintf (dump_file, " in ");
5583 print_gimple_stmt (dump_file, stmt, 0);
5586 eliminations++;
5587 gimple *orig_stmt = stmt;
5588 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5589 TREE_TYPE (sprime)))
5590 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5591 tree vdef = gimple_vdef (stmt);
5592 tree vuse = gimple_vuse (stmt);
5593 propagate_tree_value_into_stmt (&gsi, sprime);
5594 stmt = gsi_stmt (gsi);
5595 update_stmt (stmt);
5596 if (vdef != gimple_vdef (stmt))
5597 VN_INFO (vdef)->valnum = vuse;
5599 /* If we removed EH side-effects from the statement, clean
5600 its EH information. */
5601 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5603 bitmap_set_bit (need_eh_cleanup,
5604 gimple_bb (stmt)->index);
5605 if (dump_file && (dump_flags & TDF_DETAILS))
5606 fprintf (dump_file, " Removed EH side-effects.\n");
5609 /* Likewise for AB side-effects. */
5610 if (can_make_abnormal_goto
5611 && !stmt_can_make_abnormal_goto (stmt))
5613 bitmap_set_bit (need_ab_cleanup,
5614 gimple_bb (stmt)->index);
5615 if (dump_file && (dump_flags & TDF_DETAILS))
5616 fprintf (dump_file, " Removed AB side-effects.\n");
5619 continue;
5623 /* If the statement is a scalar store, see if the expression
5624 has the same value number as its rhs. If so, the store is
5625 dead. */
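/* E.g. (sketch, not from the sources):
     x_1 = *p_2;
     ...
     *p_2 = x_1;
   The second statement is deleted if the memory state still provides
   x_1 for *p_2 and the earlier access covers at least the aliases of
   the later store, as checked below.  */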
5626 if (gimple_assign_single_p (stmt)
5627 && !gimple_has_volatile_ops (stmt)
5628 && !is_gimple_reg (gimple_assign_lhs (stmt))
5629 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5630 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5632 tree val;
5633 tree rhs = gimple_assign_rhs1 (stmt);
5634 vn_reference_t vnresult;
5635 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5636 &vnresult, false);
5637 if (TREE_CODE (rhs) == SSA_NAME)
5638 rhs = VN_INFO (rhs)->valnum;
5639 if (val
5640 && operand_equal_p (val, rhs, 0))
5642 /* We can only remove the later store if the former aliases
5643 at least all accesses the later one does or if the store
5644 was to readonly memory storing the same value. */
5645 alias_set_type set = get_alias_set (lhs);
5646 if (! vnresult
5647 || vnresult->set == set
5648 || alias_set_subset_of (set, vnresult->set))
5650 if (dump_file && (dump_flags & TDF_DETAILS))
5652 fprintf (dump_file, "Deleted redundant store ");
5653 print_gimple_stmt (dump_file, stmt, 0);
5656 /* Queue stmt for removal. */
5657 to_remove.safe_push (stmt);
5658 continue;
5663 /* If this is a control statement for which value numbering left
5664 some outgoing edges unexecuted, force the condition in a way
5665 consistent with that. */
5666 if (gcond *cond = dyn_cast <gcond *> (stmt))
5668 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5669 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5671 if (dump_file && (dump_flags & TDF_DETAILS))
5673 fprintf (dump_file, "Removing unexecutable edge from ");
5674 print_gimple_stmt (dump_file, stmt, 0);
5676 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5677 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5678 gimple_cond_make_true (cond);
5679 else
5680 gimple_cond_make_false (cond);
5681 update_stmt (cond);
5682 el_todo |= TODO_cleanup_cfg;
5683 continue;
5687 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5688 bool was_noreturn = (is_gimple_call (stmt)
5689 && gimple_call_noreturn_p (stmt));
5690 tree vdef = gimple_vdef (stmt);
5691 tree vuse = gimple_vuse (stmt);
5693 /* If we didn't replace the whole stmt (or propagate the result
5694 into all uses), replace all uses on this stmt with their
5695 leaders. */
5696 bool modified = false;
5697 use_operand_p use_p;
5698 ssa_op_iter iter;
5699 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5701 tree use = USE_FROM_PTR (use_p);
5702 /* ??? The call code above leaves stmt operands un-updated. */
5703 if (TREE_CODE (use) != SSA_NAME)
5704 continue;
5705 tree sprime = eliminate_avail (use);
5706 if (sprime && sprime != use
5707 && may_propagate_copy (use, sprime)
5708 /* We substitute into debug stmts to avoid excessive
5709 debug temporaries created by removed stmts, but we need
5710 to avoid doing so for inserted sprimes as we never want
5711 to create debug temporaries for them. */
5712 && (!inserted_exprs
5713 || TREE_CODE (sprime) != SSA_NAME
5714 || !is_gimple_debug (stmt)
5715 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5717 propagate_value (use_p, sprime);
5718 modified = true;
5722 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5723 into, which is a requirement for the IPA devirt machinery. */
5724 gimple *old_stmt = stmt;
5725 if (modified)
5727 /* If a formerly non-invariant ADDR_EXPR is turned into an
5728 invariant one it was on a separate stmt. */
5729 if (gimple_assign_single_p (stmt)
5730 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5731 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5732 gimple_stmt_iterator prev = gsi;
5733 gsi_prev (&prev);
5734 if (fold_stmt (&gsi))
5736 /* fold_stmt may have created new stmts in between
5737 the previous stmt and the folded stmt. Mark
5738 all defs created there as varying to not confuse
5739 the SCCVN machinery as we're using that even during
5740 elimination. */
5741 if (gsi_end_p (prev))
5742 prev = gsi_start_bb (b);
5743 else
5744 gsi_next (&prev);
5745 if (gsi_stmt (prev) != gsi_stmt (gsi))
5748 tree def;
5749 ssa_op_iter dit;
5750 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5751 dit, SSA_OP_ALL_DEFS)
5752 /* As existing DEFs may move between stmts
5753 we have to guard VN_INFO_GET. */
5754 if (! has_VN_INFO (def))
5755 VN_INFO_GET (def)->valnum = def;
5756 if (gsi_stmt (prev) == gsi_stmt (gsi))
5757 break;
5758 gsi_next (&prev);
5760 while (1);
5762 stmt = gsi_stmt (gsi);
5763 /* In case we folded the stmt away schedule the NOP for removal. */
5764 if (gimple_nop_p (stmt))
5765 to_remove.safe_push (stmt);
5768 /* Visit indirect calls and turn them into direct calls if
5769 possible using the devirtualization machinery. Do this before
5770 checking for required EH/abnormal/noreturn cleanup as devirt
5771 may expose more of those. */
5772 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5774 tree fn = gimple_call_fn (call_stmt);
5775 if (fn
5776 && flag_devirtualize
5777 && virtual_method_call_p (fn))
5779 tree otr_type = obj_type_ref_class (fn);
5780 unsigned HOST_WIDE_INT otr_tok
5781 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5782 tree instance;
5783 ipa_polymorphic_call_context context (current_function_decl,
5784 fn, stmt, &instance);
5785 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5786 otr_type, stmt);
5787 bool final;
5788 vec <cgraph_node *> targets
5789 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5790 otr_tok, context, &final);
5791 if (dump_file)
5792 dump_possible_polymorphic_call_targets (dump_file,
5793 obj_type_ref_class (fn),
5794 otr_tok, context);
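/* A complete (final) target list with at most one entry lets us
   devirtualize: a single target becomes a direct call, an empty list
   means the call can never execute and is redirected to
   __builtin_unreachable.  */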
5795 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5797 tree fn;
5798 if (targets.length () == 1)
5799 fn = targets[0]->decl;
5800 else
5801 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5802 if (dump_enabled_p ())
5804 location_t loc = gimple_location (stmt);
5805 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5806 "converting indirect call to "
5807 "function %s\n",
5808 lang_hooks.decl_printable_name (fn, 2));
5810 gimple_call_set_fndecl (call_stmt, fn);
5811 /* If changing the call to __builtin_unreachable
5812 or similar noreturn function, adjust gimple_call_fntype
5813 too. */
5814 if (gimple_call_noreturn_p (call_stmt)
5815 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5816 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5817 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5818 == void_type_node))
5819 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5820 maybe_remove_unused_call_args (cfun, call_stmt);
5821 modified = true;
5826 if (modified)
5828 /* When changing a call into a noreturn call, cfg cleanup
5829 is needed to fix up the noreturn call. */
5830 if (!was_noreturn
5831 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5832 to_fixup.safe_push (stmt);
5833 /* When changing a condition or switch into one where we know which
5834 edge will be executed, schedule a cfg cleanup. */
5835 if ((gimple_code (stmt) == GIMPLE_COND
5836 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5837 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5838 || (gimple_code (stmt) == GIMPLE_SWITCH
5839 && TREE_CODE (gimple_switch_index
5840 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5841 el_todo |= TODO_cleanup_cfg;
5842 /* If we removed EH side-effects from the statement, clean
5843 its EH information. */
5844 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5846 bitmap_set_bit (need_eh_cleanup,
5847 gimple_bb (stmt)->index);
5848 if (dump_file && (dump_flags & TDF_DETAILS))
5849 fprintf (dump_file, " Removed EH side-effects.\n");
5851 /* Likewise for AB side-effects. */
5852 if (can_make_abnormal_goto
5853 && !stmt_can_make_abnormal_goto (stmt))
5855 bitmap_set_bit (need_ab_cleanup,
5856 gimple_bb (stmt)->index);
5857 if (dump_file && (dump_flags & TDF_DETAILS))
5858 fprintf (dump_file, " Removed AB side-effects.\n");
5860 update_stmt (stmt);
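/* If folding removed the virtual definition, value-number the old VDEF
   to the incoming VUSE so stale references to it still resolve.  */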
5861 if (vdef != gimple_vdef (stmt))
5862 VN_INFO (vdef)->valnum = vuse;
5865 /* Make new values available - for a fully redundant LHS we
5866 already continued to the next stmt above and skip this. */
5867 def_operand_p defp;
5868 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5869 eliminate_push_avail (DEF_FROM_PTR (defp));
5872 /* Replace destination PHI arguments. */
5873 FOR_EACH_EDGE (e, ei, b->succs)
5874 if (e->flags & EDGE_EXECUTABLE)
5875 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5876 !gsi_end_p (gsi);
5877 gsi_next (&gsi))
5879 gphi *phi = gsi.phi ();
5880 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5881 tree arg = USE_FROM_PTR (use_p);
5882 if (TREE_CODE (arg) != SSA_NAME
5883 || virtual_operand_p (arg))
5884 continue;
5885 tree sprime = eliminate_avail (arg);
5886 if (sprime && may_propagate_copy (arg, sprime))
5887 propagate_value (use_p, sprime);
5889 return NULL;
5892 /* Unwind the avail stack: make leaders that go out of scope here no longer available. */
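/* Each popped name is either the leader we installed for its value
   (it still occupies the avail slot; clear it) or a previous leader
   we shadowed deeper in the walk (re-install it).  */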
5894 void
5895 eliminate_dom_walker::after_dom_children (basic_block)
5897 tree entry;
5898 while ((entry = avail_stack.pop ()) != NULL_TREE)
5900 tree valnum = VN_INFO (entry)->valnum;
5901 tree old = avail[SSA_NAME_VERSION (valnum)];
5902 if (old == entry)
5903 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5904 else
5905 avail[SSA_NAME_VERSION (valnum)] = entry;
5909 /* Eliminate fully redundant computations. */
5911 unsigned int
5912 vn_eliminate (bitmap inserted_exprs)
5914 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5915 el.avail.reserve (num_ssa_names);
5917 el.walk (cfun->cfg->x_entry_block_ptr);
5919 /* We cannot remove stmts during the BB walk, especially not release
5920 SSA names there, as this confuses the VN machinery. The stmts
5921 ending up in to_remove are either stores or simple copies.
5922 Remove stmts in reverse order to make debug stmt creation possible. */
5923 while (!el.to_remove.is_empty ())
5925 gimple *stmt = el.to_remove.pop ();
5927 if (dump_file && (dump_flags & TDF_DETAILS))
5929 fprintf (dump_file, "Removing dead stmt ");
5930 print_gimple_stmt (dump_file, stmt, 0, 0);
5933 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5934 if (gimple_code (stmt) == GIMPLE_PHI)
5935 remove_phi_node (&gsi, true);
5936 else
5938 basic_block bb = gimple_bb (stmt);
5939 unlink_stmt_vdef (stmt);
5940 if (gsi_remove (&gsi, true))
5941 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5942 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5943 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5944 release_defs (stmt);
5947 /* Removing a stmt may expose a forwarder block. */
5948 el.el_todo |= TODO_cleanup_cfg;
5951 /* Fix up stmts that became noreturn calls. This may require splitting
5952 blocks and thus isn't possible during the dominator walk. Do this
5953 in reverse order so we don't inadvertently remove a stmt we want to
5954 fix up by first visiting a dominating call that is now noreturn. */
5955 while (!el.to_fixup.is_empty ())
5957 gimple *stmt = el.to_fixup.pop ();
5959 if (dump_file && (dump_flags & TDF_DETAILS))
5961 fprintf (dump_file, "Fixing up noreturn call ");
5962 print_gimple_stmt (dump_file, stmt, 0);
5965 if (fixup_noreturn_call (stmt))
5966 el.el_todo |= TODO_cleanup_cfg;
5969 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5970 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5972 if (do_eh_cleanup)
5973 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5975 if (do_ab_cleanup)
5976 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5978 if (do_eh_cleanup || do_ab_cleanup)
5979 el.el_todo |= TODO_cleanup_cfg;
5981 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5982 statistics_counter_event (cfun, "Insertions", el.insertions);
5984 return el.el_todo;
5988 namespace {
5990 const pass_data pass_data_fre =
5992 GIMPLE_PASS, /* type */
5993 "fre", /* name */
5994 OPTGROUP_NONE, /* optinfo_flags */
5995 TV_TREE_FRE, /* tv_id */
5996 ( PROP_cfg | PROP_ssa ), /* properties_required */
5997 0, /* properties_provided */
5998 0, /* properties_destroyed */
5999 0, /* todo_flags_start */
6000 0, /* todo_flags_finish */
6003 class pass_fre : public gimple_opt_pass
6005 public:
6006 pass_fre (gcc::context *ctxt)
6007 : gimple_opt_pass (pass_data_fre, ctxt)
6010 /* opt_pass methods: */
6011 opt_pass * clone () { return new pass_fre (m_ctxt); }
6012 virtual bool gate (function *) { return flag_tree_fre != 0; }
6013 virtual unsigned int execute (function *);
6015 }; // class pass_fre
6017 unsigned int
6018 pass_fre::execute (function *)
6020 unsigned int todo = 0;
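/* Value-number the whole function first; the WALKREWRITE lookup mode
   lets the machinery walk memory statements when value-numbering
   loads.  */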
6022 run_scc_vn (VN_WALKREWRITE);
6024 /* Remove all the redundant expressions. */
6025 todo |= vn_eliminate (NULL);
6027 scc_vn_restore_ssa_info ();
6028 free_scc_vn ();
6030 return todo;
6033 } // anon namespace
6035 gimple_opt_pass *
6036 make_pass_fre (gcc::context *ctxt)
6038 return new pass_fre (ctxt);