gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
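/* Illustrative only -- not part of GCC.  A minimal, self-contained
   sketch (made-up statement representation and names) of the
   straight-line case described above: plain hash-based value
   numbering, where an expression whose opcode and operand value
   numbers were already seen receives the same value number.  */
#include <cstdio>
#include <map>
#include <string>
#include <tuple>

int
main ()
{
  /* (opcode, VN of op0, VN of op1) -> value number.  */
  std::map<std::tuple<char, int, int>, int> expr_table;
  /* Temporary name -> value number.  */
  std::map<std::string, int> name_vn;
  int next_vn = 1;

  /* Straight-line program: t1 = a + b;  t2 = a + b;  t3 = t1 + c;  */
  struct stmt { const char *lhs, *op0, *op1; char opcode; };
  const stmt program[] = {
    { "t1", "a", "b", '+' },
    { "t2", "a", "b", '+' },
    { "t3", "t1", "c", '+' },
  };

  for (const stmt &s : program)
    {
      /* Operands seen for the first time get fresh value numbers.  */
      if (!name_vn.count (s.op0))
        name_vn[s.op0] = next_vn++;
      if (!name_vn.count (s.op1))
        name_vn[s.op1] = next_vn++;

      auto key = std::make_tuple (s.opcode, name_vn[s.op0], name_vn[s.op1]);
      auto it = expr_table.find (key);
      bool redundant = (it != expr_table.end ());
      int vn = redundant ? it->second : (expr_table[key] = next_vn++);
      name_vn[s.lhs] = vn;
      std::printf ("%s has value number %d%s\n", s.lhs, vn,
                   redundant ? " (redundant)" : "");
    }
  return 0;
}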
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through the unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
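/* Illustrative only -- not part of GCC.  The two functions above are
   the usual lookup vs. lookup-or-insert pattern on a hash table keyed
   by the constant together with its type (via
   vn_hash_constant_with_type / vn_constant_eq_with_type), so that
   e.g. (int) 1 and (long) 1 receive distinct value ids.  A rough
   standalone analogue with made-up types:  */
#include <cstdio>
#include <map>
#include <string>
#include <utility>

static std::map<std::pair<std::string, long>, unsigned> constant_ids;
static unsigned next_id = 1;

/* Return the value id for the constant VALUE of type TYPE_NAME,
   allocating a fresh id on first use.  */
static unsigned
get_or_alloc_id (const std::string &type_name, long value)
{
  std::pair<std::string, long> key (type_name, value);
  auto it = constant_ids.find (key);
  if (it != constant_ids.end ())
    return it->second;                     /* Already known.  */
  return constant_ids[key] = next_id++;    /* Allocate a new id.  */
}

int
main ()
{
  unsigned a = get_or_alloc_id ("int", 1);   /* New id.  */
  unsigned b = get_or_alloc_id ("long", 1);  /* Different type, new id.  */
  unsigned c = get_or_alloc_id ("int", 1);   /* Same id as A again.  */
  std::printf ("%u %u %u\n", a, b, c);
  return 0;
}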
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration. */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If two calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 if (gimple_call_with_bounds_p (call))
1137 temp.with_bounds = 1;
1138 result->safe_push (temp);
1140 /* Copy the call arguments. As they can be references as well,
1141 just chain them together. */
1142 for (i = 0; i < gimple_call_num_args (call); ++i)
1144 tree callarg = gimple_call_arg (call, i);
1145 copy_reference_ops_from_ref (callarg, result);
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1150 *I_P to point to the last element of the replacement. */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 unsigned int *i_p)
1155 unsigned int i = *i_p;
1156 vn_reference_op_t op = &(*ops)[i];
1157 vn_reference_op_t mem_op = &(*ops)[i - 1];
1158 tree addr_base;
1159 poly_int64 addr_offset = 0;
1161 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1162 from .foo.bar to the preceding MEM_REF offset and replace the
1163 address with &OBJ. */
1164 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 &addr_offset);
1166 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167 if (addr_base != TREE_OPERAND (op->op0, 0))
1169 poly_offset_int off
1170 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 SIGNED)
1172 + addr_offset);
1173 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174 op->op0 = build_fold_addr_expr (addr_base);
1175 if (tree_fits_shwi_p (mem_op->op0))
1176 mem_op->off = tree_to_shwi (mem_op->op0);
1177 else
1178 mem_op->off = -1;
1179 return true;
1181 return false;
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1185 *I_P to point to the last element of the replacement. */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 unsigned int *i_p)
1190 unsigned int i = *i_p;
1191 vn_reference_op_t op = &(*ops)[i];
1192 vn_reference_op_t mem_op = &(*ops)[i - 1];
1193 gimple *def_stmt;
1194 enum tree_code code;
1195 poly_offset_int off;
1197 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198 if (!is_gimple_assign (def_stmt))
1199 return false;
1201 code = gimple_assign_rhs_code (def_stmt);
1202 if (code != ADDR_EXPR
1203 && code != POINTER_PLUS_EXPR)
1204 return false;
1206 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1208 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1209 from .foo.bar to the preceding MEM_REF offset and replace the
1210 address with &OBJ. */
1211 if (code == ADDR_EXPR)
1213 tree addr, addr_base;
1214 poly_int64 addr_offset;
1216 addr = gimple_assign_rhs1 (def_stmt);
1217 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 &addr_offset);
1219 /* If that didn't work because the address isn't invariant, propagate
1220 the reference tree from the address operation in case the current
1221 dereference isn't offsetted. */
1222 if (!addr_base
1223 && *i_p == ops->length () - 1
1224 && known_eq (off, 0)
1225 /* This makes us disable this transform for PRE where the
1226 reference ops might be also used for code insertion which
1227 is invalid. */
1228 && default_vn_walk_kind == VN_WALKREWRITE)
1230 auto_vec<vn_reference_op_s, 32> tem;
1231 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 /* Make sure to preserve TBAA info. The only objects not
1233 wrapped in MEM_REFs that can have their address taken are
1234 STRING_CSTs. */
1235 if (tem.length () >= 2
1236 && tem[tem.length () - 2].opcode == MEM_REF)
1238 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 new_mem_op->op0
1240 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 wi::to_poly_wide (new_mem_op->op0));
1243 else
1244 gcc_assert (tem.last ().opcode == STRING_CST);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return true;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF)
1253 return false;
1255 off += addr_offset;
1256 off += mem_ref_offset (addr_base);
1257 op->op0 = TREE_OPERAND (addr_base, 0);
1259 else
1261 tree ptr, ptroff;
1262 ptr = gimple_assign_rhs1 (def_stmt);
1263 ptroff = gimple_assign_rhs2 (def_stmt);
1264 if (TREE_CODE (ptr) != SSA_NAME
1265 || TREE_CODE (ptroff) != INTEGER_CST)
1266 return false;
1268 off += wi::to_offset (ptroff);
1269 op->op0 = ptr;
1272 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1273 if (tree_fits_shwi_p (mem_op->op0))
1274 mem_op->off = tree_to_shwi (mem_op->op0);
1275 else
1276 mem_op->off = -1;
1277 if (TREE_CODE (op->op0) == SSA_NAME)
1278 op->op0 = SSA_VAL (op->op0);
1279 if (TREE_CODE (op->op0) != SSA_NAME)
1280 op->opcode = TREE_CODE (op->op0);
1282 /* And recurse. */
1283 if (TREE_CODE (op->op0) == SSA_NAME)
1284 vn_reference_maybe_forwprop_address (ops, i_p);
1285 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1286 vn_reference_fold_indirect (ops, i_p);
1287 return true;
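/* Illustrative only -- not part of GCC.  The two routines above
   canonicalize the address part of a reference: &OBJ.foo.bar feeding a
   MEM_REF has its constant component offset folded into the MEM_REF
   offset (leaving &OBJ as the base), and a pointer defined as
   base + constant is forward-propagated the same way.  The standalone
   program below (struct and names made up) only demonstrates the
   address arithmetic this relies on.  */
#include <cstdio>
#include <cstddef>

struct inner { int x; int bar; };
struct outer { int pad; inner foo; };

int
main ()
{
  outer obj;
  /* &OBJ.foo.bar ...  */
  char *p1 = (char *) &obj.foo.bar;
  /* ... is the same address as &OBJ plus the accumulated constant
     byte offset of the component path.  */
  char *p2 = (char *) &obj + offsetof (outer, foo) + offsetof (inner, bar);
  std::printf ("same address: %s\n", p1 == p2 ? "yes" : "no");
  return 0;
}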
1290 /* Optimize the reference REF to a constant if possible or return
1291 NULL_TREE if not. */
1293 tree
1294 fully_constant_vn_reference_p (vn_reference_t ref)
1296 vec<vn_reference_op_s> operands = ref->operands;
1297 vn_reference_op_t op;
1299 /* Try to simplify the translated expression if it is
1300 a call to a builtin function with at most two arguments. */
1301 op = &operands[0];
1302 if (op->opcode == CALL_EXPR
1303 && TREE_CODE (op->op0) == ADDR_EXPR
1304 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1305 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1306 && operands.length () >= 2
1307 && operands.length () <= 3)
1309 vn_reference_op_t arg0, arg1 = NULL;
1310 bool anyconst = false;
1311 arg0 = &operands[1];
1312 if (operands.length () > 2)
1313 arg1 = &operands[2];
1314 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1315 || (arg0->opcode == ADDR_EXPR
1316 && is_gimple_min_invariant (arg0->op0)))
1317 anyconst = true;
1318 if (arg1
1319 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1320 || (arg1->opcode == ADDR_EXPR
1321 && is_gimple_min_invariant (arg1->op0))))
1322 anyconst = true;
1323 if (anyconst)
1325 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1326 arg1 ? 2 : 1,
1327 arg0->op0,
1328 arg1 ? arg1->op0 : NULL);
1329 if (folded
1330 && TREE_CODE (folded) == NOP_EXPR)
1331 folded = TREE_OPERAND (folded, 0);
1332 if (folded
1333 && is_gimple_min_invariant (folded))
1334 return folded;
1338 /* Simplify reads from constants or constant initializers. */
1339 else if (BITS_PER_UNIT == 8
1340 && is_gimple_reg_type (ref->type)
1341 && (!INTEGRAL_TYPE_P (ref->type)
1342 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1344 poly_int64 off = 0;
1345 HOST_WIDE_INT size;
1346 if (INTEGRAL_TYPE_P (ref->type))
1347 size = TYPE_PRECISION (ref->type);
1348 else
1349 size = tree_to_shwi (TYPE_SIZE (ref->type));
1350 if (size % BITS_PER_UNIT != 0
1351 || size > MAX_BITSIZE_MODE_ANY_MODE)
1352 return NULL_TREE;
1353 size /= BITS_PER_UNIT;
1354 unsigned i;
1355 for (i = 0; i < operands.length (); ++i)
1357 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1359 ++i;
1360 break;
1362 if (known_eq (operands[i].off, -1))
1363 return NULL_TREE;
1364 off += operands[i].off;
1365 if (operands[i].opcode == MEM_REF)
1367 ++i;
1368 break;
1371 vn_reference_op_t base = &operands[--i];
1372 tree ctor = error_mark_node;
1373 tree decl = NULL_TREE;
1374 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1375 ctor = base->op0;
1376 else if (base->opcode == MEM_REF
1377 && base[1].opcode == ADDR_EXPR
1378 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1379 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1380 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1382 decl = TREE_OPERAND (base[1].op0, 0);
1383 if (TREE_CODE (decl) == STRING_CST)
1384 ctor = decl;
1385 else
1386 ctor = ctor_for_folding (decl);
1388 if (ctor == NULL_TREE)
1389 return build_zero_cst (ref->type);
1390 else if (ctor != error_mark_node)
1392 HOST_WIDE_INT const_off;
1393 if (decl)
1395 tree res = fold_ctor_reference (ref->type, ctor,
1396 off * BITS_PER_UNIT,
1397 size * BITS_PER_UNIT, decl);
1398 if (res)
1400 STRIP_USELESS_TYPE_CONVERSION (res);
1401 if (is_gimple_min_invariant (res))
1402 return res;
1405 else if (off.is_constant (&const_off))
1407 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1408 int len = native_encode_expr (ctor, buf, size, const_off);
1409 if (len > 0)
1410 return native_interpret_expr (ref->type, buf, len);
1415 return NULL_TREE;
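/* Illustrative only -- not part of GCC.  The constant-initializer path
   above folds a load at a known byte offset and size by going through
   the object's byte representation (native_encode_expr /
   native_interpret_expr).  A standalone analogue with made-up data:  */
#include <cstdio>
#include <cstring>
#include <cstdint>

static const unsigned char ctor_bytes[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

int
main ()
{
  /* A 4-byte read at byte offset 2 of the constant object can be
     evaluated by reinterpreting its encoded bytes.  */
  std::uint32_t val;
  std::memcpy (&val, ctor_bytes + 2, sizeof val);
  std::printf ("folded load value: 0x%08x\n", (unsigned) val);
  return 0;
}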
1418 /* Return true if OPS contain a storage order barrier. */
1420 static bool
1421 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1423 vn_reference_op_t op;
1424 unsigned i;
1426 FOR_EACH_VEC_ELT (ops, i, op)
1427 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1428 return true;
1430 return false;
1433 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1434 structures into their value numbers. This is done in-place, and
1435 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1436 whether any operands were valueized. */
1438 static vec<vn_reference_op_s>
1439 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1441 vn_reference_op_t vro;
1442 unsigned int i;
1444 *valueized_anything = false;
1446 FOR_EACH_VEC_ELT (orig, i, vro)
1448 if (vro->opcode == SSA_NAME
1449 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1451 tree tem = SSA_VAL (vro->op0);
1452 if (tem != vro->op0)
1454 *valueized_anything = true;
1455 vro->op0 = tem;
1457 /* If it transforms from an SSA_NAME to a constant, update
1458 the opcode. */
1459 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1460 vro->opcode = TREE_CODE (vro->op0);
1462 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1464 tree tem = SSA_VAL (vro->op1);
1465 if (tem != vro->op1)
1467 *valueized_anything = true;
1468 vro->op1 = tem;
1471 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1473 tree tem = SSA_VAL (vro->op2);
1474 if (tem != vro->op2)
1476 *valueized_anything = true;
1477 vro->op2 = tem;
1480 /* If it transforms from an SSA_NAME to an address, fold with
1481 a preceding indirect reference. */
1482 if (i > 0
1483 && vro->op0
1484 && TREE_CODE (vro->op0) == ADDR_EXPR
1485 && orig[i - 1].opcode == MEM_REF)
1487 if (vn_reference_fold_indirect (&orig, &i))
1488 *valueized_anything = true;
1490 else if (i > 0
1491 && vro->opcode == SSA_NAME
1492 && orig[i - 1].opcode == MEM_REF)
1494 if (vn_reference_maybe_forwprop_address (&orig, &i))
1495 *valueized_anything = true;
1497 /* If it transforms a non-constant ARRAY_REF into a constant
1498 one, adjust the constant offset. */
1499 else if (vro->opcode == ARRAY_REF
1500 && known_eq (vro->off, -1)
1501 && poly_int_tree_p (vro->op0)
1502 && poly_int_tree_p (vro->op1)
1503 && TREE_CODE (vro->op2) == INTEGER_CST)
1505 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1506 - wi::to_poly_offset (vro->op1))
1507 * wi::to_offset (vro->op2)
1508 * vn_ref_op_align_unit (vro));
1509 off.to_shwi (&vro->off);
1513 return orig;
1516 static vec<vn_reference_op_s>
1517 valueize_refs (vec<vn_reference_op_s> orig)
1519 bool tem;
1520 return valueize_refs_1 (orig, &tem);
1523 static vec<vn_reference_op_s> shared_lookup_references;
1525 /* Create a vector of vn_reference_op_s structures from REF, a
1526 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1527 this function. *VALUEIZED_ANYTHING will specify whether any
1528 operands were valueized. */
1530 static vec<vn_reference_op_s>
1531 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1533 if (!ref)
1534 return vNULL;
1535 shared_lookup_references.truncate (0);
1536 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1537 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1538 valueized_anything);
1539 return shared_lookup_references;
1542 /* Create a vector of vn_reference_op_s structures from CALL, a
1543 call statement. The vector is shared among all callers of
1544 this function. */
1546 static vec<vn_reference_op_s>
1547 valueize_shared_reference_ops_from_call (gcall *call)
1549 if (!call)
1550 return vNULL;
1551 shared_lookup_references.truncate (0);
1552 copy_reference_ops_from_call (call, &shared_lookup_references);
1553 shared_lookup_references = valueize_refs (shared_lookup_references);
1554 return shared_lookup_references;
1557 /* Lookup a SCCVN reference operation VR in the current hash table.
1558 Returns the resulting value number if it exists in the hash table,
1559 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1560 vn_reference_t stored in the hashtable if something is found. */
1562 static tree
1563 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1565 vn_reference_s **slot;
1566 hashval_t hash;
1568 hash = vr->hashcode;
1569 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1570 if (!slot && current_info == optimistic_info)
1571 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1572 if (slot)
1574 if (vnresult)
1575 *vnresult = (vn_reference_t)*slot;
1576 return ((vn_reference_t)*slot)->result;
1579 return NULL_TREE;
1582 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1583 with the current VUSE and performs the expression lookup. */
1585 static void *
1586 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1587 unsigned int cnt, void *vr_)
1589 vn_reference_t vr = (vn_reference_t)vr_;
1590 vn_reference_s **slot;
1591 hashval_t hash;
1593 /* This bounds the stmt walks we perform on reference lookups
1594 to O(1) instead of O(N) where N is the number of dominating
1595 stores. */
1596 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1597 return (void *)-1;
1599 if (last_vuse_ptr)
1600 *last_vuse_ptr = vuse;
1602 /* Fixup vuse and hash. */
1603 if (vr->vuse)
1604 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1605 vr->vuse = vuse_ssa_val (vuse);
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1609 hash = vr->hashcode;
1610 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1611 if (!slot && current_info == optimistic_info)
1612 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1613 if (slot)
1614 return *slot;
1616 return NULL;
1619 /* Lookup an existing or insert a new vn_reference entry into the
1620 value table for the VUSE, SET, TYPE, OPERANDS reference which
1621 has the value VALUE which is either a constant or an SSA name. */
1623 static vn_reference_t
1624 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1625 alias_set_type set,
1626 tree type,
1627 vec<vn_reference_op_s,
1628 va_heap> operands,
1629 tree value)
1631 vn_reference_s vr1;
1632 vn_reference_t result;
1633 unsigned value_id;
1634 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1635 vr1.operands = operands;
1636 vr1.type = type;
1637 vr1.set = set;
1638 vr1.hashcode = vn_reference_compute_hash (&vr1);
1639 if (vn_reference_lookup_1 (&vr1, &result))
1640 return result;
1641 if (TREE_CODE (value) == SSA_NAME)
1642 value_id = VN_INFO (value)->value_id;
1643 else
1644 value_id = get_or_alloc_constant_value_id (value);
1645 return vn_reference_insert_pieces (vuse, set, type,
1646 operands.copy (), value, value_id);
1649 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1650 static unsigned mprts_hook_cnt;
1652 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1654 static tree
1655 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1657 if (!rcode.is_tree_code ())
1658 return NULL_TREE;
1659 tree *ops = ops_;
1660 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1661 if (rcode == CONSTRUCTOR
1662 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1663 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1664 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1666 length = CONSTRUCTOR_NELTS (ops_[0]);
1667 ops = XALLOCAVEC (tree, length);
1668 for (unsigned i = 0; i < length; ++i)
1669 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1671 vn_nary_op_t vnresult = NULL;
1672 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1673 type, ops, &vnresult);
1674 /* We can end up endlessly recursing simplifications if the lookup above
1675 presents us with a def-use chain that mirrors the original simplification.
1676 See PR80887 for an example. Limit successful lookup artificially
1677 to 10 times if we are called as mprts_hook. */
1678 if (res
1679 && mprts_hook
1680 && --mprts_hook_cnt == 0)
1682 if (dump_file && (dump_flags & TDF_DETAILS))
1683 fprintf (dump_file, "Resetting mprts_hook after too many "
1684 "invocations.\n");
1685 mprts_hook = NULL;
1687 return res;
1690 /* Return a value-number for RCODE OPS... either by looking up an existing
1691 value-number for the simplified result or by inserting the operation if
1692 INSERT is true. */
1694 static tree
1695 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1696 bool insert)
1698 tree result = NULL_TREE;
1699 /* We will be creating a value number for
1700 RCODE (OPS...).
1701 So first simplify and lookup this expression to see if it
1702 is already available. */
1703 mprts_hook = vn_lookup_simplify_result;
1704 mprts_hook_cnt = 9;
1705 bool res = false;
1706 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1708 case 1:
1709 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1710 break;
1711 case 2:
1712 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1713 break;
1714 case 3:
1715 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1716 break;
1718 mprts_hook = NULL;
1719 gimple *new_stmt = NULL;
1720 if (res
1721 && gimple_simplified_result_is_gimple_val (rcode, ops))
1722 /* The expression is already available. */
1723 result = ops[0];
1724 else
1726 tree val = vn_lookup_simplify_result (rcode, type, ops);
1727 if (!val && insert)
1729 gimple_seq stmts = NULL;
1730 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1731 if (result)
1733 gcc_assert (gimple_seq_singleton_p (stmts));
1734 new_stmt = gimple_seq_first_stmt (stmts);
1737 else
1738 /* The expression is already available. */
1739 result = val;
1741 if (new_stmt)
1743 /* The expression is not yet available, value-number lhs to
1744 the new SSA_NAME we created. */
1745 /* Initialize value-number information properly. */
1746 VN_INFO_GET (result)->valnum = result;
1747 VN_INFO (result)->value_id = get_next_value_id ();
1748 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1749 new_stmt);
1750 VN_INFO (result)->needs_insertion = true;
1751 /* ??? PRE phi-translation inserts NARYs without corresponding
1752 SSA name result. Re-use those but set their result according
1753 to the stmt we just built. */
1754 vn_nary_op_t nary = NULL;
1755 vn_nary_op_lookup_stmt (new_stmt, &nary);
1756 if (nary)
1758 gcc_assert (nary->result == NULL_TREE);
1759 nary->result = gimple_assign_lhs (new_stmt);
1761 /* As all "inserted" statements are singleton SCCs, insert
1762 to the valid table. This is strictly needed to
1763 avoid re-generating new value SSA_NAMEs for the same
1764 expression during SCC iteration over and over (the
1765 optimistic table gets cleared after each iteration).
1766 We do not need to insert into the optimistic table, as
1767 lookups there will fall back to the valid table. */
1768 else if (current_info == optimistic_info)
1770 current_info = valid_info;
1771 vn_nary_op_insert_stmt (new_stmt, result);
1772 current_info = optimistic_info;
1774 else
1775 vn_nary_op_insert_stmt (new_stmt, result);
1776 if (dump_file && (dump_flags & TDF_DETAILS))
1778 fprintf (dump_file, "Inserting name ");
1779 print_generic_expr (dump_file, result);
1780 fprintf (dump_file, " for expression ");
1781 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1782 fprintf (dump_file, "\n");
1785 return result;
1788 /* Return a value-number for RCODE OPS... either by looking up an existing
1789 value-number for the simplified result or by inserting the operation. */
1791 static tree
1792 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1794 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1797 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1798 its value if present. */
1800 tree
1801 vn_nary_simplify (vn_nary_op_t nary)
1803 if (nary->length > 3)
1804 return NULL_TREE;
1805 tree ops[3];
1806 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1807 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1811 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1812 from the statement defining VUSE and if not successful tries to
1813 translate *REFP and VR_ through an aggregate copy at the definition
1814 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1815 of *REF and *VR. If only disambiguation was performed then
1816 *DISAMBIGUATE_ONLY is set to true. */
1818 static void *
1819 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1820 bool *disambiguate_only)
1822 vn_reference_t vr = (vn_reference_t)vr_;
1823 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1824 tree base = ao_ref_base (ref);
1825 HOST_WIDE_INT offseti, maxsizei;
1826 static vec<vn_reference_op_s> lhs_ops;
1827 ao_ref lhs_ref;
1828 bool lhs_ref_ok = false;
1829 poly_int64 copy_size;
1831 /* If the reference is based on a parameter that was determined as
1832 pointing to readonly memory it doesn't change. */
1833 if (TREE_CODE (base) == MEM_REF
1834 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1835 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1836 && bitmap_bit_p (const_parms,
1837 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1839 *disambiguate_only = true;
1840 return NULL;
1843 /* First try to disambiguate after value-replacing in the definitions LHS. */
1844 if (is_gimple_assign (def_stmt))
1846 tree lhs = gimple_assign_lhs (def_stmt);
1847 bool valueized_anything = false;
1848 /* Avoid re-allocation overhead. */
1849 lhs_ops.truncate (0);
1850 copy_reference_ops_from_ref (lhs, &lhs_ops);
1851 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1852 if (valueized_anything)
1854 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1855 get_alias_set (lhs),
1856 TREE_TYPE (lhs), lhs_ops);
1857 if (lhs_ref_ok
1858 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1860 *disambiguate_only = true;
1861 return NULL;
1864 else
1866 ao_ref_init (&lhs_ref, lhs);
1867 lhs_ref_ok = true;
1870 /* If we reach a clobbering statement try to skip it and see if
1871 we find a VN result with exactly the same value as the
1872 possible clobber. In this case we can ignore the clobber
1873 and return the found value.
1874 Note that we don't need to worry about partially overlapping
1875 accesses as we can then use TBAA to disambiguate against the
1876 clobbering statement when looking up a load (thus the
1877 VN_WALKREWRITE guard). */
1878 if (vn_walk_kind == VN_WALKREWRITE
1879 && is_gimple_reg_type (TREE_TYPE (lhs))
1880 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1882 tree *saved_last_vuse_ptr = last_vuse_ptr;
1883 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1884 last_vuse_ptr = NULL;
1885 tree saved_vuse = vr->vuse;
1886 hashval_t saved_hashcode = vr->hashcode;
1887 void *res = vn_reference_lookup_2 (ref,
1888 gimple_vuse (def_stmt), 0, vr);
1889 /* Need to restore vr->vuse and vr->hashcode. */
1890 vr->vuse = saved_vuse;
1891 vr->hashcode = saved_hashcode;
1892 last_vuse_ptr = saved_last_vuse_ptr;
1893 if (res && res != (void *)-1)
1895 vn_reference_t vnresult = (vn_reference_t) res;
1896 if (vnresult->result
1897 && operand_equal_p (vnresult->result,
1898 gimple_assign_rhs1 (def_stmt), 0))
1899 return res;
1903 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1904 && gimple_call_num_args (def_stmt) <= 4)
1906 /* For builtin calls valueize its arguments and call the
1907 alias oracle again. Valueization may improve points-to
1908 info of pointers and constify size and position arguments.
1909 Originally this was motivated by PR61034 which has
1910 conditional calls to free falsely clobbering ref because
1911 of imprecise points-to info of the argument. */
1912 tree oldargs[4];
1913 bool valueized_anything = false;
1914 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1916 oldargs[i] = gimple_call_arg (def_stmt, i);
1917 tree val = vn_valueize (oldargs[i]);
1918 if (val != oldargs[i])
1920 gimple_call_set_arg (def_stmt, i, val);
1921 valueized_anything = true;
1924 if (valueized_anything)
1926 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1927 ref);
1928 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1929 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1930 if (!res)
1932 *disambiguate_only = true;
1933 return NULL;
1938 if (*disambiguate_only)
1939 return (void *)-1;
1941 /* If we cannot constrain the size of the reference we cannot
1942 test if anything kills it. */
1943 if (!ref->max_size_known_p ())
1944 return (void *)-1;
1946 poly_int64 offset = ref->offset;
1947 poly_int64 maxsize = ref->max_size;
1949 /* We can't deduce anything useful from clobbers. */
1950 if (gimple_clobber_p (def_stmt))
1951 return (void *)-1;
1953 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1954 from that definition.
1955 1) Memset. */
1956 if (is_gimple_reg_type (vr->type)
1957 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1958 && integer_zerop (gimple_call_arg (def_stmt, 1))
1959 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1960 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1962 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1963 tree base2;
1964 poly_int64 offset2, size2, maxsize2;
1965 bool reverse;
1966 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1967 &reverse);
1968 tree len = gimple_call_arg (def_stmt, 2);
1969 if (known_size_p (maxsize2)
1970 && operand_equal_p (base, base2, 0)
1971 && known_subrange_p (offset, maxsize, offset2,
1972 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1974 tree val = build_zero_cst (vr->type);
1975 return vn_reference_lookup_or_insert_for_pieces
1976 (vuse, vr->set, vr->type, vr->operands, val);
1980 /* 2) Assignment from an empty CONSTRUCTOR. */
1981 else if (is_gimple_reg_type (vr->type)
1982 && gimple_assign_single_p (def_stmt)
1983 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1984 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1986 tree base2;
1987 poly_int64 offset2, size2, maxsize2;
1988 bool reverse;
1989 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1990 &offset2, &size2, &maxsize2, &reverse);
1991 if (known_size_p (maxsize2)
1992 && operand_equal_p (base, base2, 0)
1993 && known_subrange_p (offset, maxsize, offset2, size2))
1995 tree val = build_zero_cst (vr->type);
1996 return vn_reference_lookup_or_insert_for_pieces
1997 (vuse, vr->set, vr->type, vr->operands, val);
2001 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2002 routines to extract the assigned bits. */
2003 else if (known_eq (ref->size, maxsize)
2004 && is_gimple_reg_type (vr->type)
2005 && !contains_storage_order_barrier_p (vr->operands)
2006 && gimple_assign_single_p (def_stmt)
2007 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2008 /* native_encode and native_decode operate on arrays of bytes
2009 and so fundamentally need a compile-time size and offset. */
2010 && maxsize.is_constant (&maxsizei)
2011 && maxsizei % BITS_PER_UNIT == 0
2012 && offset.is_constant (&offseti)
2013 && offseti % BITS_PER_UNIT == 0
2014 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2015 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2016 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2018 tree base2;
2019 HOST_WIDE_INT offset2, size2;
2020 bool reverse;
2021 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2022 &offset2, &size2, &reverse);
2023 if (base2
2024 && !reverse
2025 && size2 % BITS_PER_UNIT == 0
2026 && offset2 % BITS_PER_UNIT == 0
2027 && operand_equal_p (base, base2, 0)
2028 && known_subrange_p (offseti, maxsizei, offset2, size2))
2030 /* We support up to 512-bit values (for V8DFmode). */
2031 unsigned char buffer[64];
2032 int len;
2034 tree rhs = gimple_assign_rhs1 (def_stmt);
2035 if (TREE_CODE (rhs) == SSA_NAME)
2036 rhs = SSA_VAL (rhs);
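/* Encode the (possibly valueized) constant RHS into a byte buffer so the
   bits covered by the read can be re-interpreted in the access type.  */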
2037 len = native_encode_expr (rhs,
2038 buffer, sizeof (buffer));
2039 if (len > 0)
2041 tree type = vr->type;
2042 /* Make sure to interpret in a type that has a range
2043 covering the whole access size. */
2044 if (INTEGRAL_TYPE_P (vr->type)
2045 && maxsizei != TYPE_PRECISION (vr->type))
2046 type = build_nonstandard_integer_type (maxsizei,
2047 TYPE_UNSIGNED (type));
2048 tree val = native_interpret_expr (type,
2049 buffer
2050 + ((offseti - offset2)
2051 / BITS_PER_UNIT),
2052 maxsizei / BITS_PER_UNIT);
2053 /* If we chop off bits because the type's precision doesn't
2054 match the memory access size this is ok when optimizing
2055 reads but not when called from the DSE code during
2056 elimination. */
2057 if (val
2058 && type != vr->type)
2060 if (! int_fits_type_p (val, vr->type))
2061 val = NULL_TREE;
2062 else
2063 val = fold_convert (vr->type, val);
2066 if (val)
2067 return vn_reference_lookup_or_insert_for_pieces
2068 (vuse, vr->set, vr->type, vr->operands, val);
2073 /* 4) Assignment from an SSA name whose definition we may be able
2074 to access pieces from. */
2075 else if (known_eq (ref->size, maxsize)
2076 && is_gimple_reg_type (vr->type)
2077 && !contains_storage_order_barrier_p (vr->operands)
2078 && gimple_assign_single_p (def_stmt)
2079 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2081 tree base2;
2082 poly_int64 offset2, size2, maxsize2;
2083 bool reverse;
2084 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2085 &offset2, &size2, &maxsize2,
2086 &reverse);
2087 if (!reverse
2088 && known_size_p (maxsize2)
2089 && known_eq (maxsize2, size2)
2090 && operand_equal_p (base, base2, 0)
2091 && known_subrange_p (offset, maxsize, offset2, size2)
2092 /* ??? We can't handle bitfield precision extracts without
2093 either using an alternate type for the BIT_FIELD_REF and
2094 then doing a conversion or possibly adjusting the offset
2095 according to endianness. */
2096 && (! INTEGRAL_TYPE_P (vr->type)
2097 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2098 && multiple_p (ref->size, BITS_PER_UNIT))
2100 code_helper rcode = BIT_FIELD_REF;
2101 tree ops[3];
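/* Pick the read out of the stored value with a BIT_FIELD_REF: operand 1
   is the width of the read in bits, operand 2 its bit offset relative to
   the store.  */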
2102 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2103 ops[1] = bitsize_int (ref->size);
2104 ops[2] = bitsize_int (offset - offset2);
2105 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2106 if (val
2107 && (TREE_CODE (val) != SSA_NAME
2108 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2110 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2111 (vuse, vr->set, vr->type, vr->operands, val);
2112 return res;
2117 /* 5) For aggregate copies translate the reference through them if
2118 the copy kills ref. */
2119 else if (vn_walk_kind == VN_WALKREWRITE
2120 && gimple_assign_single_p (def_stmt)
2121 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2122 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2123 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2125 tree base2;
2126 int i, j, k;
2127 auto_vec<vn_reference_op_s> rhs;
2128 vn_reference_op_t vro;
2129 ao_ref r;
2131 if (!lhs_ref_ok)
2132 return (void *)-1;
2134 /* See if the assignment kills REF. */
2135 base2 = ao_ref_base (&lhs_ref);
2136 if (!lhs_ref.max_size_known_p ()
2137 || (base != base2
2138 && (TREE_CODE (base) != MEM_REF
2139 || TREE_CODE (base2) != MEM_REF
2140 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2141 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2142 TREE_OPERAND (base2, 1))))
2143 || !stmt_kills_ref_p (def_stmt, ref))
2144 return (void *)-1;
2146 /* Find the common base of ref and the lhs. lhs_ops already
2147 contains valueized operands for the lhs. */
2148 i = vr->operands.length () - 1;
2149 j = lhs_ops.length () - 1;
2150 while (j >= 0 && i >= 0
2151 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2153 i--;
2154 j--;
2157 /* ??? The innermost op should always be a MEM_REF and we already
2158 checked that the assignment to the lhs kills vr. Thus for
2159 aggregate copies using char[] types the vn_reference_op_eq
2160 may fail when comparing types for compatibility. But we really
2161 don't care here - further lookups with the rewritten operands
2162 will simply fail if we messed up types too badly. */
2163 poly_int64 extra_off = 0;
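/* The innermost MEM_REFs of the lhs and of vr may differ only by a
   constant offset; record that difference so it can be applied to the
   inner MEM_REF of the rhs below.  */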
2164 if (j == 0 && i >= 0
2165 && lhs_ops[0].opcode == MEM_REF
2166 && maybe_ne (lhs_ops[0].off, -1))
2168 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2169 i--, j--;
2170 else if (vr->operands[i].opcode == MEM_REF
2171 && maybe_ne (vr->operands[i].off, -1))
2173 extra_off = vr->operands[i].off - lhs_ops[0].off;
2174 i--, j--;
2178 /* i now points to the first additional op.
2179 ??? LHS may not be completely contained in VR, one or more
2180 VIEW_CONVERT_EXPRs could be in its way. We could at least
2181 try handling outermost VIEW_CONVERT_EXPRs. */
2182 if (j != -1)
2183 return (void *)-1;
2185 /* Punt if the additional ops contain a storage order barrier. */
2186 for (k = i; k >= 0; k--)
2188 vro = &vr->operands[k];
2189 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2190 return (void *)-1;
2193 /* Now re-write REF to be based on the rhs of the assignment. */
2194 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2196 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2197 if (maybe_ne (extra_off, 0))
2199 if (rhs.length () < 2
2200 || rhs[0].opcode != MEM_REF
2201 || known_eq (rhs[0].off, -1))
2202 return (void *)-1;
2203 rhs[0].off += extra_off;
2204 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2205 build_int_cst (TREE_TYPE (rhs[0].op0),
2206 extra_off));
2209 /* We need to pre-pend vr->operands[0..i] to rhs. */
2210 vec<vn_reference_op_s> old = vr->operands;
2211 if (i + 1 + rhs.length () > vr->operands.length ())
2212 vr->operands.safe_grow (i + 1 + rhs.length ());
2213 else
2214 vr->operands.truncate (i + 1 + rhs.length ());
2215 FOR_EACH_VEC_ELT (rhs, j, vro)
2216 vr->operands[i + 1 + j] = *vro;
2217 vr->operands = valueize_refs (vr->operands);
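/* If vr->operands is the shared scratch vector it may have been
   reallocated by the grow or valueization above; keep the global
   pointer in sync.  */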
2218 if (old == shared_lookup_references)
2219 shared_lookup_references = vr->operands;
2220 vr->hashcode = vn_reference_compute_hash (vr);
2222 /* Try folding the new reference to a constant. */
2223 tree val = fully_constant_vn_reference_p (vr);
2224 if (val)
2225 return vn_reference_lookup_or_insert_for_pieces
2226 (vuse, vr->set, vr->type, vr->operands, val);
2228 /* Adjust *ref from the new operands. */
2229 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2230 return (void *)-1;
2231 /* This can happen with bitfields. */
2232 if (maybe_ne (ref->size, r.size))
2233 return (void *)-1;
2234 *ref = r;
2236 /* Do not update last seen VUSE after translating. */
2237 last_vuse_ptr = NULL;
2239 /* Keep looking for the adjusted *REF / VR pair. */
2240 return NULL;
2243 /* 6) For memcpy copies translate the reference through them if
2244 the copy kills ref. */
2245 else if (vn_walk_kind == VN_WALKREWRITE
2246 && is_gimple_reg_type (vr->type)
2247 /* ??? Handle BCOPY as well. */
2248 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2249 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2250 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2251 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2252 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2253 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2254 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2255 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2257 tree lhs, rhs;
2258 ao_ref r;
2259 poly_int64 rhs_offset, lhs_offset;
2260 vn_reference_op_s op;
2261 poly_uint64 mem_offset;
2262 poly_int64 at, byte_maxsize;
2264 /* Only handle non-variable, addressable refs. */
2265 if (maybe_ne (ref->size, maxsize)
2266 || !multiple_p (offset, BITS_PER_UNIT, &at)
2267 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2268 return (void *)-1;
2270 /* Extract a pointer base and an offset for the destination. */
2271 lhs = gimple_call_arg (def_stmt, 0);
2272 lhs_offset = 0;
2273 if (TREE_CODE (lhs) == SSA_NAME)
2275 lhs = SSA_VAL (lhs);
2276 if (TREE_CODE (lhs) == SSA_NAME)
2278 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2279 if (gimple_assign_single_p (def_stmt)
2280 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2281 lhs = gimple_assign_rhs1 (def_stmt);
2284 if (TREE_CODE (lhs) == ADDR_EXPR)
2286 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2287 &lhs_offset);
2288 if (!tem)
2289 return (void *)-1;
2290 if (TREE_CODE (tem) == MEM_REF
2291 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2293 lhs = TREE_OPERAND (tem, 0);
2294 if (TREE_CODE (lhs) == SSA_NAME)
2295 lhs = SSA_VAL (lhs);
2296 lhs_offset += mem_offset;
2298 else if (DECL_P (tem))
2299 lhs = build_fold_addr_expr (tem);
2300 else
2301 return (void *)-1;
2303 if (TREE_CODE (lhs) != SSA_NAME
2304 && TREE_CODE (lhs) != ADDR_EXPR)
2305 return (void *)-1;
2307 /* Extract a pointer base and an offset for the source. */
2308 rhs = gimple_call_arg (def_stmt, 1);
2309 rhs_offset = 0;
2310 if (TREE_CODE (rhs) == SSA_NAME)
2311 rhs = SSA_VAL (rhs);
2312 if (TREE_CODE (rhs) == ADDR_EXPR)
2314 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2315 &rhs_offset);
2316 if (!tem)
2317 return (void *)-1;
2318 if (TREE_CODE (tem) == MEM_REF
2319 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2321 rhs = TREE_OPERAND (tem, 0);
2322 rhs_offset += mem_offset;
2324 else if (DECL_P (tem)
2325 || TREE_CODE (tem) == STRING_CST)
2326 rhs = build_fold_addr_expr (tem);
2327 else
2328 return (void *)-1;
2330 if (TREE_CODE (rhs) != SSA_NAME
2331 && TREE_CODE (rhs) != ADDR_EXPR)
2332 return (void *)-1;
2334 /* The bases of the destination and of the reference have to agree. */
2335 if (TREE_CODE (base) == MEM_REF)
2337 if (TREE_OPERAND (base, 0) != lhs
2338 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2339 return (void *) -1;
2340 at += mem_offset;
2342 else if (!DECL_P (base)
2343 || TREE_CODE (lhs) != ADDR_EXPR
2344 || TREE_OPERAND (lhs, 0) != base)
2345 return (void *)-1;
2347 /* If the access is completely outside of the memcpy destination
2348 area there is no aliasing. */
2349 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2350 return NULL;
2351 /* And the access has to be contained within the memcpy destination. */
2352 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2353 return (void *)-1;
2355 /* Make room for 2 operands in the new reference. */
2356 if (vr->operands.length () < 2)
2358 vec<vn_reference_op_s> old = vr->operands;
2359 vr->operands.safe_grow_cleared (2);
2360 if (old == shared_lookup_references)
2361 shared_lookup_references = vr->operands;
2363 else
2364 vr->operands.truncate (2);
2366 /* The looked-through reference is a simple MEM_REF. */
2367 memset (&op, 0, sizeof (op));
2368 op.type = vr->type;
2369 op.opcode = MEM_REF;
2370 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2371 op.off = at - lhs_offset + rhs_offset;
2372 vr->operands[0] = op;
2373 op.type = TREE_TYPE (rhs);
2374 op.opcode = TREE_CODE (rhs);
2375 op.op0 = rhs;
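/* An off of -1 marks a reference operand without a constant offset.  */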
2376 op.off = -1;
2377 vr->operands[1] = op;
2378 vr->hashcode = vn_reference_compute_hash (vr);
2380 /* Try folding the new reference to a constant. */
2381 tree val = fully_constant_vn_reference_p (vr);
2382 if (val)
2383 return vn_reference_lookup_or_insert_for_pieces
2384 (vuse, vr->set, vr->type, vr->operands, val);
2386 /* Adjust *ref from the new operands. */
2387 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2388 return (void *)-1;
2389 /* This can happen with bitfields. */
2390 if (maybe_ne (ref->size, r.size))
2391 return (void *)-1;
2392 *ref = r;
2394 /* Do not update last seen VUSE after translating. */
2395 last_vuse_ptr = NULL;
2397 /* Keep looking for the adjusted *REF / VR pair. */
2398 return NULL;
2401 /* Bail out and stop walking. */
2402 return (void *)-1;
2405 /* Return a reference op vector from OP that can be used for
2406 vn_reference_lookup_pieces. The caller is responsible for releasing
2407 the vector. */
2409 vec<vn_reference_op_s>
2410 vn_reference_operands_for_lookup (tree op)
2412 bool valueized;
2413 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2416 /* Lookup a reference operation by its parts, in the current hash table.
2417 Returns the resulting value number if it exists in the hash table,
2418 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2419 vn_reference_t stored in the hashtable if something is found. */
2421 tree
2422 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2423 vec<vn_reference_op_s> operands,
2424 vn_reference_t *vnresult, vn_lookup_kind kind)
2426 struct vn_reference_s vr1;
2427 vn_reference_t tmp;
2428 tree cst;
2430 if (!vnresult)
2431 vnresult = &tmp;
2432 *vnresult = NULL;
2434 vr1.vuse = vuse_ssa_val (vuse);
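/* Copy the operands into the shared scratch vector so that valueization
   below does not modify the caller's OPERANDS.  */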
2435 shared_lookup_references.truncate (0);
2436 shared_lookup_references.safe_grow (operands.length ());
2437 memcpy (shared_lookup_references.address (),
2438 operands.address (),
2439 sizeof (vn_reference_op_s)
2440 * operands.length ());
2441 vr1.operands = operands = shared_lookup_references
2442 = valueize_refs (shared_lookup_references);
2443 vr1.type = type;
2444 vr1.set = set;
2445 vr1.hashcode = vn_reference_compute_hash (&vr1);
2446 if ((cst = fully_constant_vn_reference_p (&vr1)))
2447 return cst;
2449 vn_reference_lookup_1 (&vr1, vnresult);
2450 if (!*vnresult
2451 && kind != VN_NOWALK
2452 && vr1.vuse)
2454 ao_ref r;
2455 vn_walk_kind = kind;
2456 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2457 *vnresult =
2458 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2459 vn_reference_lookup_2,
2460 vn_reference_lookup_3,
2461 vuse_ssa_val, &vr1);
2462 gcc_checking_assert (vr1.operands == shared_lookup_references);
2465 if (*vnresult)
2466 return (*vnresult)->result;
2468 return NULL_TREE;
2471 /* Lookup OP in the current hash table, and return the resulting value
2472 number if it exists in the hash table. Return NULL_TREE if it does
2473 not exist in the hash table or if the result field of the structure
2474 was NULL. VNRESULT will be filled in with the vn_reference_t
2475 stored in the hashtable if one exists. When TBAA_P is false assume
2476 we are looking up a store and treat it as having alias-set zero. */
2478 tree
2479 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2480 vn_reference_t *vnresult, bool tbaa_p)
2482 vec<vn_reference_op_s> operands;
2483 struct vn_reference_s vr1;
2484 tree cst;
2485 bool valueized_anything;
2487 if (vnresult)
2488 *vnresult = NULL;
2490 vr1.vuse = vuse_ssa_val (vuse);
2491 vr1.operands = operands
2492 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2493 vr1.type = TREE_TYPE (op);
2494 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2495 vr1.hashcode = vn_reference_compute_hash (&vr1);
2496 if ((cst = fully_constant_vn_reference_p (&vr1)))
2497 return cst;
2499 if (kind != VN_NOWALK
2500 && vr1.vuse)
2502 vn_reference_t wvnresult;
2503 ao_ref r;
2504 /* Make sure to use a valueized reference if we valueized anything.
2505 Otherwise preserve the full reference for advanced TBAA. */
2506 if (!valueized_anything
2507 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2508 vr1.operands))
2509 ao_ref_init (&r, op);
2510 if (! tbaa_p)
2511 r.ref_alias_set = r.base_alias_set = 0;
2512 vn_walk_kind = kind;
2513 wvnresult =
2514 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2515 vn_reference_lookup_2,
2516 vn_reference_lookup_3,
2517 vuse_ssa_val, &vr1);
2518 gcc_checking_assert (vr1.operands == shared_lookup_references);
2519 if (wvnresult)
2521 if (vnresult)
2522 *vnresult = wvnresult;
2523 return wvnresult->result;
2526 return NULL_TREE;
2529 return vn_reference_lookup_1 (&vr1, vnresult);
2532 /* Lookup CALL in the current hash table and return the entry in
2533 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2535 void
2536 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2537 vn_reference_t vr)
2539 if (vnresult)
2540 *vnresult = NULL;
2542 tree vuse = gimple_vuse (call);
2544 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2545 vr->operands = valueize_shared_reference_ops_from_call (call);
2546 vr->type = gimple_expr_type (call);
2547 vr->set = 0;
2548 vr->hashcode = vn_reference_compute_hash (vr);
2549 vn_reference_lookup_1 (vr, vnresult);
2552 /* Insert OP into the current hash table with a value number of
2553 RESULT, and return the resulting reference structure we created. */
2555 static vn_reference_t
2556 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2558 vn_reference_s **slot;
2559 vn_reference_t vr1;
2560 bool tem;
2562 vr1 = current_info->references_pool->allocate ();
2563 if (TREE_CODE (result) == SSA_NAME)
2564 vr1->value_id = VN_INFO (result)->value_id;
2565 else
2566 vr1->value_id = get_or_alloc_constant_value_id (result);
2567 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2568 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2569 vr1->type = TREE_TYPE (op);
2570 vr1->set = get_alias_set (op);
2571 vr1->hashcode = vn_reference_compute_hash (vr1);
2572 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2573 vr1->result_vdef = vdef;
2575 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2576 INSERT);
2578 /* Because we lookup stores using vuses, and value number failures
2579 using the vdefs (see visit_reference_op_store for how and why),
2580 it's possible that on failure we may try to insert an already
2581 inserted store. This is not wrong, there is no ssa name for a
2582 store that we could use as a differentiator anyway. Thus, unlike
2583 the other lookup functions, you cannot gcc_assert (!*slot)
2584 here. */
2586 /* But free the old slot in case of a collision. */
2587 if (*slot)
2588 free_reference (*slot);
2590 *slot = vr1;
2591 return vr1;
2594 /* Insert a reference by its pieces into the current hash table with
2595 a value number of RESULT. Return the resulting reference
2596 structure we created. */
2598 vn_reference_t
2599 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2600 vec<vn_reference_op_s> operands,
2601 tree result, unsigned int value_id)
2604 vn_reference_s **slot;
2605 vn_reference_t vr1;
2607 vr1 = current_info->references_pool->allocate ();
2608 vr1->value_id = value_id;
2609 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2610 vr1->operands = valueize_refs (operands);
2611 vr1->type = type;
2612 vr1->set = set;
2613 vr1->hashcode = vn_reference_compute_hash (vr1);
2614 if (result && TREE_CODE (result) == SSA_NAME)
2615 result = SSA_VAL (result);
2616 vr1->result = result;
2618 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2619 INSERT);
2621 /* At this point we should have all the things inserted that we have
2622 seen before, and we should never try inserting something that
2623 already exists. */
2624 gcc_assert (!*slot);
2625 if (*slot)
2626 free_reference (*slot);
2628 *slot = vr1;
2629 return vr1;
2632 /* Compute and return the hash value for nary operation VBO1. */
2634 static hashval_t
2635 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2637 inchash::hash hstate;
2638 unsigned i;
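/* Valueize SSA name operands and canonicalize operand order for
   commutative and swappable comparison codes so that equivalent
   operations compute the same hash.  */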
2640 for (i = 0; i < vno1->length; ++i)
2641 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2642 vno1->op[i] = SSA_VAL (vno1->op[i]);
2644 if (((vno1->length == 2
2645 && commutative_tree_code (vno1->opcode))
2646 || (vno1->length == 3
2647 && commutative_ternary_tree_code (vno1->opcode)))
2648 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2649 std::swap (vno1->op[0], vno1->op[1]);
2650 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2651 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2653 std::swap (vno1->op[0], vno1->op[1]);
2654 vno1->opcode = swap_tree_comparison (vno1->opcode);
2657 hstate.add_int (vno1->opcode);
2658 for (i = 0; i < vno1->length; ++i)
2659 inchash::add_expr (vno1->op[i], hstate);
2661 return hstate.end ();
2664 /* Compare nary operations VNO1 and VNO2 and return true if they are
2665 equivalent. */
2667 bool
2668 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2670 unsigned i;
2672 if (vno1->hashcode != vno2->hashcode)
2673 return false;
2675 if (vno1->length != vno2->length)
2676 return false;
2678 if (vno1->opcode != vno2->opcode
2679 || !types_compatible_p (vno1->type, vno2->type))
2680 return false;
2682 for (i = 0; i < vno1->length; ++i)
2683 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2684 return false;
2686 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2687 of op1. Need to check to make sure they are the same. */
2688 if (vno1->opcode == BIT_INSERT_EXPR
2689 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2690 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2691 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2692 return false;
2694 return true;
2697 /* Initialize VNO from the pieces provided. */
2699 static void
2700 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2701 enum tree_code code, tree type, tree *ops)
2703 vno->opcode = code;
2704 vno->length = length;
2705 vno->type = type;
2706 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2709 /* Initialize VNO from OP. */
2711 static void
2712 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2714 unsigned i;
2716 vno->opcode = TREE_CODE (op);
2717 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2718 vno->type = TREE_TYPE (op);
2719 for (i = 0; i < vno->length; ++i)
2720 vno->op[i] = TREE_OPERAND (op, i);
2723 /* Return the number of operands for a vn_nary ops structure from STMT. */
2725 static unsigned int
2726 vn_nary_length_from_stmt (gimple *stmt)
2728 switch (gimple_assign_rhs_code (stmt))
2730 case REALPART_EXPR:
2731 case IMAGPART_EXPR:
2732 case VIEW_CONVERT_EXPR:
2733 return 1;
2735 case BIT_FIELD_REF:
2736 return 3;
2738 case CONSTRUCTOR:
2739 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2741 default:
2742 return gimple_num_ops (stmt) - 1;
2746 /* Initialize VNO from STMT. */
2748 static void
2749 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2751 unsigned i;
2753 vno->opcode = gimple_assign_rhs_code (stmt);
2754 vno->type = gimple_expr_type (stmt);
2755 switch (vno->opcode)
2757 case REALPART_EXPR:
2758 case IMAGPART_EXPR:
2759 case VIEW_CONVERT_EXPR:
2760 vno->length = 1;
2761 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2762 break;
2764 case BIT_FIELD_REF:
2765 vno->length = 3;
2766 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2767 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2768 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2769 break;
2771 case CONSTRUCTOR:
2772 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2773 for (i = 0; i < vno->length; ++i)
2774 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2775 break;
2777 default:
2778 gcc_checking_assert (!gimple_assign_single_p (stmt));
2779 vno->length = gimple_num_ops (stmt) - 1;
2780 for (i = 0; i < vno->length; ++i)
2781 vno->op[i] = gimple_op (stmt, i + 1);
2785 /* Compute the hashcode for VNO and look for it in the hash table;
2786 return the resulting value number if it exists in the hash table.
2787 Return NULL_TREE if it does not exist in the hash table or if the
2788 result field of the operation is NULL. VNRESULT will contain the
2789 vn_nary_op_t from the hashtable if it exists. */
2791 static tree
2792 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2794 vn_nary_op_s **slot;
2796 if (vnresult)
2797 *vnresult = NULL;
2799 vno->hashcode = vn_nary_op_compute_hash (vno);
2800 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2801 NO_INSERT);
2802 if (!slot && current_info == optimistic_info)
2803 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2804 NO_INSERT);
2805 if (!slot)
2806 return NULL_TREE;
2807 if (vnresult)
2808 *vnresult = *slot;
2809 return (*slot)->result;
2812 /* Lookup a n-ary operation by its pieces and return the resulting value
2813 number if it exists in the hash table. Return NULL_TREE if it does
2814 not exist in the hash table or if the result field of the operation
2815 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2816 if it exists. */
2818 tree
2819 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2820 tree type, tree *ops, vn_nary_op_t *vnresult)
2822 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2823 sizeof_vn_nary_op (length));
2824 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2825 return vn_nary_op_lookup_1 (vno1, vnresult);
2828 /* Lookup OP in the current hash table, and return the resulting value
2829 number if it exists in the hash table. Return NULL_TREE if it does
2830 not exist in the hash table or if the result field of the operation
2831 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2832 if it exists. */
2834 tree
2835 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2837 vn_nary_op_t vno1
2838 = XALLOCAVAR (struct vn_nary_op_s,
2839 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2840 init_vn_nary_op_from_op (vno1, op);
2841 return vn_nary_op_lookup_1 (vno1, vnresult);
2844 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2845 value number if it exists in the hash table. Return NULL_TREE if
2846 it does not exist in the hash table. VNRESULT will contain the
2847 vn_nary_op_t from the hashtable if it exists. */
2849 tree
2850 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2852 vn_nary_op_t vno1
2853 = XALLOCAVAR (struct vn_nary_op_s,
2854 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2855 init_vn_nary_op_from_stmt (vno1, stmt);
2856 return vn_nary_op_lookup_1 (vno1, vnresult);
2859 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2861 static vn_nary_op_t
2862 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2864 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2867 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2868 obstack. */
2870 static vn_nary_op_t
2871 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2873 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2874 &current_info->nary_obstack);
2876 vno1->value_id = value_id;
2877 vno1->length = length;
2878 vno1->result = result;
2880 return vno1;
2883 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2884 VNO->HASHCODE first. */
2886 static vn_nary_op_t
2887 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2888 bool compute_hash)
2890 vn_nary_op_s **slot;
2892 if (compute_hash)
2893 vno->hashcode = vn_nary_op_compute_hash (vno);
2895 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2896 /* While we do not want to insert things twice it's awkward to
2897 avoid it in the case where visit_nary_op pattern-matches stuff
2898 and ends up simplifying the replacement to itself. We then
2899 get two inserts, one from visit_nary_op and one from
2900 vn_nary_build_or_lookup.
2901 So allow inserts with the same value number. */
2902 if (*slot && (*slot)->result == vno->result)
2903 return *slot;
2905 gcc_assert (!*slot);
2907 *slot = vno;
2908 return vno;
2911 /* Insert an n-ary operation into the current hash table using its
2912 pieces. Return the vn_nary_op_t structure we created and put in
2913 the hashtable. */
2915 vn_nary_op_t
2916 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2917 tree type, tree *ops,
2918 tree result, unsigned int value_id)
2920 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2921 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2922 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2925 /* Insert OP into the current hash table with a value number of
2926 RESULT. Return the vn_nary_op_t structure we created and put in
2927 the hashtable. */
2929 vn_nary_op_t
2930 vn_nary_op_insert (tree op, tree result)
2932 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2933 vn_nary_op_t vno1;
2935 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2936 init_vn_nary_op_from_op (vno1, op);
2937 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2940 /* Insert the rhs of STMT into the current hash table with a value number of
2941 RESULT. */
2943 static vn_nary_op_t
2944 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2946 vn_nary_op_t vno1
2947 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2948 result, VN_INFO (result)->value_id);
2949 init_vn_nary_op_from_stmt (vno1, stmt);
2950 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2953 /* Compute a hashcode for PHI operation VP1 and return it. */
2955 static inline hashval_t
2956 vn_phi_compute_hash (vn_phi_t vp1)
2958 inchash::hash hstate (vp1->phiargs.length () > 2
2959 ? vp1->block->index : vp1->phiargs.length ());
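/* PHIs with more than two arguments are only ever compared within a
   single block (see vn_phi_eq), so seed the hash with the block index;
   smaller PHIs may match across blocks and are seeded with the
   argument count.  */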
2960 tree phi1op;
2961 tree type;
2962 edge e;
2963 edge_iterator ei;
2965 /* If all PHI arguments are constants we need to distinguish
2966 the PHI node via its type. */
2967 type = vp1->type;
2968 hstate.merge_hash (vn_hash_type (type));
2970 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2972 /* Don't hash backedge values; they need to be handled as VN_TOP
2973 for optimistic value-numbering. */
2974 if (e->flags & EDGE_DFS_BACK)
2975 continue;
2977 phi1op = vp1->phiargs[e->dest_idx];
2978 if (phi1op == VN_TOP)
2979 continue;
2980 inchash::add_expr (phi1op, hstate);
2983 return hstate.end ();
2987 /* Return true if COND1 and COND2 represent the same condition, set
2988 *INVERTED_P if one needs to be inverted to make it the same as
2989 the other. */
2991 static bool
2992 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2993 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2995 enum tree_code code1 = gimple_cond_code (cond1);
2996 enum tree_code code2 = gimple_cond_code (cond2);
2998 *inverted_p = false;
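/* Accept COND2 if its comparison code equals COND1's, or is its swapped
   and/or inverted variant; *INVERTED_P records whether an inversion was
   needed.  */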
2999 if (code1 == code2)
3001 else if (code1 == swap_tree_comparison (code2))
3002 std::swap (lhs2, rhs2);
3003 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3004 *inverted_p = true;
3005 else if (code1 == invert_tree_comparison
3006 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3008 std::swap (lhs2, rhs2);
3009 *inverted_p = true;
3011 else
3012 return false;
3014 return ((expressions_equal_p (lhs1, lhs2)
3015 && expressions_equal_p (rhs1, rhs2))
3016 || (commutative_tree_code (code1)
3017 && expressions_equal_p (lhs1, rhs2)
3018 && expressions_equal_p (rhs1, lhs2)));
3021 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3023 static int
3024 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3026 if (vp1->hashcode != vp2->hashcode)
3027 return false;
3029 if (vp1->block != vp2->block)
3031 if (vp1->phiargs.length () != vp2->phiargs.length ())
3032 return false;
3034 switch (vp1->phiargs.length ())
3036 case 1:
3037 /* Single-arg PHIs are just copies. */
3038 break;
3040 case 2:
3042 /* Rule out backedges into the PHI. */
3043 if (vp1->block->loop_father->header == vp1->block
3044 || vp2->block->loop_father->header == vp2->block)
3045 return false;
3047 /* If the PHI nodes do not have compatible types
3048 they are not the same. */
3049 if (!types_compatible_p (vp1->type, vp2->type))
3050 return false;
3052 basic_block idom1
3053 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3054 basic_block idom2
3055 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3056 /* If the immediate dominators end in switch stmts, multiple
3057 values may end up in the same PHI arg via intermediate
3058 CFG merges. */
3059 if (EDGE_COUNT (idom1->succs) != 2
3060 || EDGE_COUNT (idom2->succs) != 2)
3061 return false;
3063 /* Verify the controlling stmt is the same. */
3064 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3065 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3066 if (! last1 || ! last2)
3067 return false;
3068 bool inverted_p;
3069 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3070 last2, vp2->cclhs, vp2->ccrhs,
3071 &inverted_p))
3072 return false;
3074 /* Get at true/false controlled edges into the PHI. */
3075 edge te1, te2, fe1, fe2;
3076 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3077 &te1, &fe1)
3078 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3079 &te2, &fe2))
3080 return false;
3082 /* Swap edges if the second condition is the inverse of the
3083 first. */
3084 if (inverted_p)
3085 std::swap (te2, fe2);
3087 /* ??? Handle VN_TOP specially. */
3088 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3089 vp2->phiargs[te2->dest_idx])
3090 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3091 vp2->phiargs[fe2->dest_idx]))
3092 return false;
3094 return true;
3097 default:
3098 return false;
3102 /* If the PHI nodes do not have compatible types
3103 they are not the same. */
3104 if (!types_compatible_p (vp1->type, vp2->type))
3105 return false;
3107 /* Any phi in the same block will have its arguments in the
3108 same edge order, because of how we store phi nodes. */
3109 int i;
3110 tree phi1op;
3111 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3113 tree phi2op = vp2->phiargs[i];
3114 if (phi1op == VN_TOP || phi2op == VN_TOP)
3115 continue;
3116 if (!expressions_equal_p (phi1op, phi2op))
3117 return false;
3120 return true;
3123 static vec<tree> shared_lookup_phiargs;
3125 /* Lookup PHI in the current hash table, and return the resulting
3126 value number if it exists in the hash table. Return NULL_TREE if
3127 it does not exist in the hash table. */
3129 static tree
3130 vn_phi_lookup (gimple *phi)
3132 vn_phi_s **slot;
3133 struct vn_phi_s vp1;
3134 edge e;
3135 edge_iterator ei;
3137 shared_lookup_phiargs.truncate (0);
3138 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3140 /* Canonicalize the SSA_NAME's to their value number. */
3141 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3143 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3144 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3145 shared_lookup_phiargs[e->dest_idx] = def;
3147 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3148 vp1.phiargs = shared_lookup_phiargs;
3149 vp1.block = gimple_bb (phi);
3150 /* Extract values of the controlling condition. */
3151 vp1.cclhs = NULL_TREE;
3152 vp1.ccrhs = NULL_TREE;
3153 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3154 if (EDGE_COUNT (idom1->succs) == 2)
3155 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3157 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3158 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3160 vp1.hashcode = vn_phi_compute_hash (&vp1);
3161 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3162 NO_INSERT);
3163 if (!slot && current_info == optimistic_info)
3164 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3165 NO_INSERT);
3166 if (!slot)
3167 return NULL_TREE;
3168 return (*slot)->result;
3171 /* Insert PHI into the current hash table with a value number of
3172 RESULT. */
3174 static vn_phi_t
3175 vn_phi_insert (gimple *phi, tree result)
3177 vn_phi_s **slot;
3178 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3179 vec<tree> args = vNULL;
3180 edge e;
3181 edge_iterator ei;
3183 args.safe_grow (gimple_phi_num_args (phi));
3185 /* Canonicalize the SSA_NAME's to their value number. */
3186 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3188 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3189 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3190 args[e->dest_idx] = def;
3192 vp1->value_id = VN_INFO (result)->value_id;
3193 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3194 vp1->phiargs = args;
3195 vp1->block = gimple_bb (phi);
3196 /* Extract values of the controlling condition. */
3197 vp1->cclhs = NULL_TREE;
3198 vp1->ccrhs = NULL_TREE;
3199 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3200 if (EDGE_COUNT (idom1->succs) == 2)
3201 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3203 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3204 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3206 vp1->result = result;
3207 vp1->hashcode = vn_phi_compute_hash (vp1);
3209 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3211 /* Because we iterate over phi operations more than once, it's
3212 possible the slot might already exist here, hence no assert.*/
3213 *slot = vp1;
3214 return vp1;
3218 /* Print set of components in strongly connected component SCC to OUT. */
3220 static void
3221 print_scc (FILE *out, vec<tree> scc)
3223 tree var;
3224 unsigned int i;
3226 fprintf (out, "SCC consists of %u:", scc.length ());
3227 FOR_EACH_VEC_ELT (scc, i, var)
3229 fprintf (out, " ");
3230 print_generic_expr (out, var);
3232 fprintf (out, "\n");
3235 /* Return true if BB1 is dominated by BB2 taking into account edges
3236 that are not executable. */
3238 static bool
3239 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3241 edge_iterator ei;
3242 edge e;
3244 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3245 return true;
3247 /* Before iterating we'd like to know if there exists an
3248 (executable) path from bb2 to bb1 at all; if not we can
3249 directly return false. For now simply iterate once. */
3251 /* Iterate to the single executable bb1 predecessor. */
3252 if (EDGE_COUNT (bb1->preds) > 1)
3254 edge prede = NULL;
3255 FOR_EACH_EDGE (e, ei, bb1->preds)
3256 if (e->flags & EDGE_EXECUTABLE)
3258 if (prede)
3260 prede = NULL;
3261 break;
3263 prede = e;
3265 if (prede)
3267 bb1 = prede->src;
3269 /* Re-do the dominance check with changed bb1. */
3270 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3271 return true;
3275 /* Iterate to the single executable bb2 successor. */
3276 edge succe = NULL;
3277 FOR_EACH_EDGE (e, ei, bb2->succs)
3278 if (e->flags & EDGE_EXECUTABLE)
3280 if (succe)
3282 succe = NULL;
3283 break;
3285 succe = e;
3287 if (succe)
3289 /* Verify the reached block is only reached through succe.
3290 If there is only one edge we can spare us the dominator
3291 check and iterate directly. */
3292 if (EDGE_COUNT (succe->dest->preds) > 1)
3294 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3295 if (e != succe
3296 && (e->flags & EDGE_EXECUTABLE))
3298 succe = NULL;
3299 break;
3302 if (succe)
3304 bb2 = succe->dest;
3306 /* Re-do the dominance check with changed bb2. */
3307 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3308 return true;
3312 /* We could now iterate updating bb1 / bb2. */
3313 return false;
3316 /* Set the value number of FROM to TO, return true if it has changed
3317 as a result. */
3319 static inline bool
3320 set_ssa_val_to (tree from, tree to)
3322 tree currval = SSA_VAL (from);
3323 poly_int64 toff, coff;
3325 /* The only thing we allow as value numbers are ssa_names
3326 and invariants. So assert that here. We don't allow VN_TOP
3327 as visiting a stmt should produce a value-number other than
3328 that.
3329 ??? Still VN_TOP can happen for unreachable code, so force
3330 it to varying in that case. Not all code is prepared to
3331 get VN_TOP on valueization. */
3332 if (to == VN_TOP)
3334 if (dump_file && (dump_flags & TDF_DETAILS))
3335 fprintf (dump_file, "Forcing value number to varying on "
3336 "receiving VN_TOP\n");
3337 to = from;
3340 gcc_assert (to != NULL_TREE
3341 && ((TREE_CODE (to) == SSA_NAME
3342 && (to == from || SSA_VAL (to) == to))
3343 || is_gimple_min_invariant (to)));
3345 if (from != to)
3347 if (currval == from)
3349 if (dump_file && (dump_flags & TDF_DETAILS))
3351 fprintf (dump_file, "Not changing value number of ");
3352 print_generic_expr (dump_file, from);
3353 fprintf (dump_file, " from VARYING to ");
3354 print_generic_expr (dump_file, to);
3355 fprintf (dump_file, "\n");
3357 return false;
3359 else if (currval != VN_TOP
3360 && ! is_gimple_min_invariant (currval)
3361 && is_gimple_min_invariant (to))
3363 if (dump_file && (dump_flags & TDF_DETAILS))
3365 fprintf (dump_file, "Forcing VARYING instead of changing "
3366 "value number of ");
3367 print_generic_expr (dump_file, from);
3368 fprintf (dump_file, " from ");
3369 print_generic_expr (dump_file, currval);
3370 fprintf (dump_file, " (non-constant) to ");
3371 print_generic_expr (dump_file, to);
3372 fprintf (dump_file, " (constant)\n");
3374 to = from;
3376 else if (TREE_CODE (to) == SSA_NAME
3377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3378 to = from;
3381 if (dump_file && (dump_flags & TDF_DETAILS))
3383 fprintf (dump_file, "Setting value number of ");
3384 print_generic_expr (dump_file, from);
3385 fprintf (dump_file, " to ");
3386 print_generic_expr (dump_file, to);
3389 if (currval != to
3390 && !operand_equal_p (currval, to, 0)
3391 /* Different undefined SSA names are not actually different. See
3392 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3393 && !(TREE_CODE (currval) == SSA_NAME
3394 && TREE_CODE (to) == SSA_NAME
3395 && ssa_undefined_value_p (currval, false)
3396 && ssa_undefined_value_p (to, false))
3397 /* ??? For addresses involving volatile objects or types operand_equal_p
3398 does not reliably detect ADDR_EXPRs as equal. We know we are only
3399 getting invariant gimple addresses here, so can use
3400 get_addr_base_and_unit_offset to do this comparison. */
3401 && !(TREE_CODE (currval) == ADDR_EXPR
3402 && TREE_CODE (to) == ADDR_EXPR
3403 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3404 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3405 && known_eq (coff, toff)))
3407 if (dump_file && (dump_flags & TDF_DETAILS))
3408 fprintf (dump_file, " (changed)\n");
3410 /* If we equate two SSA names we have to make the side-band info
3411 of the leader conservative (and remember whatever original value
3412 was present). */
3413 if (TREE_CODE (to) == SSA_NAME)
3415 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3416 && SSA_NAME_RANGE_INFO (to))
3418 if (SSA_NAME_IS_DEFAULT_DEF (to)
3419 || dominated_by_p_w_unex
3420 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3421 gimple_bb (SSA_NAME_DEF_STMT (to))))
3422 /* Keep the info from the dominator. */
3424 else
3426 /* Save old info. */
3427 if (! VN_INFO (to)->info.range_info)
3429 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3430 VN_INFO (to)->range_info_anti_range_p
3431 = SSA_NAME_ANTI_RANGE_P (to);
3433 /* Rather than allocating memory and unioning the info
3434 just clear it. */
3435 if (dump_file && (dump_flags & TDF_DETAILS))
3437 fprintf (dump_file, "clearing range info of ");
3438 print_generic_expr (dump_file, to);
3439 fprintf (dump_file, "\n");
3441 SSA_NAME_RANGE_INFO (to) = NULL;
3444 else if (POINTER_TYPE_P (TREE_TYPE (to))
3445 && SSA_NAME_PTR_INFO (to))
3447 if (SSA_NAME_IS_DEFAULT_DEF (to)
3448 || dominated_by_p_w_unex
3449 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3450 gimple_bb (SSA_NAME_DEF_STMT (to))))
3451 /* Keep the info from the dominator. */
3453 else if (! SSA_NAME_PTR_INFO (from)
3454 /* Handle the case of trivially equivalent info. */
3455 || memcmp (SSA_NAME_PTR_INFO (to),
3456 SSA_NAME_PTR_INFO (from),
3457 sizeof (ptr_info_def)) != 0)
3459 /* Save old info. */
3460 if (! VN_INFO (to)->info.ptr_info)
3461 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3462 /* Rather than allocating memory and unioning the info
3463 just clear it. */
3464 if (dump_file && (dump_flags & TDF_DETAILS))
3466 fprintf (dump_file, "clearing points-to info of ");
3467 print_generic_expr (dump_file, to);
3468 fprintf (dump_file, "\n");
3470 SSA_NAME_PTR_INFO (to) = NULL;
3475 VN_INFO (from)->valnum = to;
3476 return true;
3478 if (dump_file && (dump_flags & TDF_DETAILS))
3479 fprintf (dump_file, "\n");
3480 return false;
3483 /* Mark as processed all the definitions in the defining stmt of USE, or
3484 the USE itself. */
3486 static void
3487 mark_use_processed (tree use)
3489 ssa_op_iter iter;
3490 def_operand_p defp;
3491 gimple *stmt = SSA_NAME_DEF_STMT (use);
3493 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3495 VN_INFO (use)->use_processed = true;
3496 return;
3499 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3501 tree def = DEF_FROM_PTR (defp);
3503 VN_INFO (def)->use_processed = true;
3507 /* Value number all definitions in STMT to themselves.
3508 Return true if a value number changed. */
3510 static bool
3511 defs_to_varying (gimple *stmt)
3513 bool changed = false;
3514 ssa_op_iter iter;
3515 def_operand_p defp;
3517 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3519 tree def = DEF_FROM_PTR (defp);
3520 changed |= set_ssa_val_to (def, def);
3522 return changed;
3525 /* Visit a copy between LHS and RHS, return true if the value number
3526 changed. */
3528 static bool
3529 visit_copy (tree lhs, tree rhs)
3531 /* Valueize. */
3532 rhs = SSA_VAL (rhs);
3534 return set_ssa_val_to (lhs, rhs);
3537 /* Lookup a value for OP in type WIDE_TYPE, i.e. a WIDE_TYPE value that
3538 converts back to OP in the type of OP. */
3540 static tree
3541 valueized_wider_op (tree wide_type, tree op)
3543 if (TREE_CODE (op) == SSA_NAME)
3544 op = SSA_VAL (op);
3546 /* Either we have the op widened available. */
3547 tree ops[3] = {};
3548 ops[0] = op;
3549 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3550 wide_type, ops, NULL);
3551 if (tem)
3552 return tem;
3554 /* Or the op is truncated from some existing value. */
3555 if (TREE_CODE (op) == SSA_NAME)
3557 gimple *def = SSA_NAME_DEF_STMT (op);
3558 if (is_gimple_assign (def)
3559 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3561 tem = gimple_assign_rhs1 (def);
3562 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3564 if (TREE_CODE (tem) == SSA_NAME)
3565 tem = SSA_VAL (tem);
3566 return tem;
3571 /* For constants simply extend it. */
3572 if (TREE_CODE (op) == INTEGER_CST)
3573 return wide_int_to_tree (wide_type, wi::to_wide (op));
3575 return NULL_TREE;
3578 /* Visit a nary operator RHS, value number it, and return true if the
3579 value number of LHS has changed as a result. */
3581 static bool
3582 visit_nary_op (tree lhs, gassign *stmt)
3584 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3585 if (result)
3586 return set_ssa_val_to (lhs, result);
3588 /* Do some special pattern matching for redundancies of operations
3589 in different types. */
3590 enum tree_code code = gimple_assign_rhs_code (stmt);
3591 tree type = TREE_TYPE (lhs);
3592 tree rhs1 = gimple_assign_rhs1 (stmt);
3593 switch (code)
3595 CASE_CONVERT:
3596 /* Match arithmetic done in a different type where we can easily
3597 substitute the result from some earlier sign-changed or widened
3598 operation. */
3599 if (INTEGRAL_TYPE_P (type)
3600 && TREE_CODE (rhs1) == SSA_NAME
3601 /* We only handle sign-changes or zero-extension -> & mask. */
3602 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3603 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3604 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3606 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3607 if (def
3608 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3609 || gimple_assign_rhs_code (def) == MINUS_EXPR
3610 || gimple_assign_rhs_code (def) == MULT_EXPR))
3612 tree ops[3] = {};
3613 /* Either we have the op widened available. */
3614 ops[0] = valueized_wider_op (type,
3615 gimple_assign_rhs1 (def));
3616 if (ops[0])
3617 ops[1] = valueized_wider_op (type,
3618 gimple_assign_rhs2 (def));
3619 if (ops[0] && ops[1])
3621 ops[0] = vn_nary_op_lookup_pieces
3622 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3623 /* We have wider operation available. */
3624 if (ops[0])
3626 unsigned lhs_prec = TYPE_PRECISION (type);
3627 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
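/* If the precision did not change (a pure sign-change) the value found
   in the wider type only needs a conversion; for a zero-extension it has
   to be masked down to the original precision.  */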
3628 if (lhs_prec == rhs_prec)
3630 ops[1] = NULL_TREE;
3631 result = vn_nary_build_or_lookup (NOP_EXPR,
3632 type, ops);
3633 if (result)
3635 bool changed = set_ssa_val_to (lhs, result);
3636 vn_nary_op_insert_stmt (stmt, result);
3637 return changed;
3640 else
3642 ops[1] = wide_int_to_tree (type,
3643 wi::mask (rhs_prec, false,
3644 lhs_prec));
3645 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3646 TREE_TYPE (lhs),
3647 ops);
3648 if (result)
3650 bool changed = set_ssa_val_to (lhs, result);
3651 vn_nary_op_insert_stmt (stmt, result);
3652 return changed;
3659 default:;
3662 bool changed = set_ssa_val_to (lhs, lhs);
3663 vn_nary_op_insert_stmt (stmt, lhs);
3664 return changed;
3667 /* Visit a call STMT storing into LHS. Return true if the value number
3668 of the LHS has changed as a result. */
3670 static bool
3671 visit_reference_op_call (tree lhs, gcall *stmt)
3673 bool changed = false;
3674 struct vn_reference_s vr1;
3675 vn_reference_t vnresult = NULL;
3676 tree vdef = gimple_vdef (stmt);
3678 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3679 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3680 lhs = NULL_TREE;
3682 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3683 if (vnresult)
3685 if (vnresult->result_vdef && vdef)
3686 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3687 else if (vdef)
3688 /* If the call was discovered to be pure or const reflect
3689 that as far as possible. */
3690 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3692 if (!vnresult->result && lhs)
3693 vnresult->result = lhs;
3695 if (vnresult->result && lhs)
3696 changed |= set_ssa_val_to (lhs, vnresult->result);
3698 else
3700 vn_reference_t vr2;
3701 vn_reference_s **slot;
3702 tree vdef_val = vdef;
3703 if (vdef)
3705 /* If we value numbered an indirect call's function to one
3706 not clobbering memory, value number its VDEF to its
3707 VUSE. */
3708 tree fn = gimple_call_fn (stmt);
3709 if (fn && TREE_CODE (fn) == SSA_NAME)
3711 fn = SSA_VAL (fn);
3712 if (TREE_CODE (fn) == ADDR_EXPR
3713 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3714 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3715 & (ECF_CONST | ECF_PURE)))
3716 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3718 changed |= set_ssa_val_to (vdef, vdef_val);
3720 if (lhs)
3721 changed |= set_ssa_val_to (lhs, lhs);
3722 vr2 = current_info->references_pool->allocate ();
3723 vr2->vuse = vr1.vuse;
3724 /* As we are not walking the virtual operand chain we know the
3725 shared_lookup_references are still original so we can re-use
3726 them here. */
3727 vr2->operands = vr1.operands.copy ();
3728 vr2->type = vr1.type;
3729 vr2->set = vr1.set;
3730 vr2->hashcode = vr1.hashcode;
3731 vr2->result = lhs;
3732 vr2->result_vdef = vdef_val;
3733 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3734 INSERT);
3735 gcc_assert (!*slot);
3736 *slot = vr2;
3739 return changed;
3742 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3743 and return true if the value number of the LHS has changed as a result. */
3745 static bool
3746 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3748 bool changed = false;
3749 tree last_vuse;
3750 tree result;
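/* Track the oldest VUSE the alias walk visits; the load is inserted with
   that VUSE below so the result is keyed to the earliest equivalent
   memory state.  */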
3752 last_vuse = gimple_vuse (stmt);
3753 last_vuse_ptr = &last_vuse;
3754 result = vn_reference_lookup (op, gimple_vuse (stmt),
3755 default_vn_walk_kind, NULL, true);
3756 last_vuse_ptr = NULL;
3758 /* We handle type-punning through unions by value-numbering based
3759 on offset and size of the access. Be prepared to handle a
3760 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3761 if (result
3762 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3764 /* We will be setting the value number of lhs to the value number
3765 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3766 So first simplify and lookup this expression to see if it
3767 is already available. */
3768 code_helper rcode = VIEW_CONVERT_EXPR;
3769 tree ops[3] = { result };
3770 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3773 if (result)
3774 changed = set_ssa_val_to (lhs, result);
3775 else
3777 changed = set_ssa_val_to (lhs, lhs);
3778 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3781 return changed;
3785 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3786 and return true if the value number of the LHS has changed as a result. */
3788 static bool
3789 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3791 bool changed = false;
3792 vn_reference_t vnresult = NULL;
3793 tree assign;
3794 bool resultsame = false;
3795 tree vuse = gimple_vuse (stmt);
3796 tree vdef = gimple_vdef (stmt);
3798 if (TREE_CODE (op) == SSA_NAME)
3799 op = SSA_VAL (op);
3801 /* First we want to lookup using the *vuses* from the store and see
3802 if the last store to this location with the same address
3803 had the same value.
3805 The vuses represent the memory state before the store. If the
3806 memory state, address, and value of the store are the same as the
3807 last store to this location, then this store will produce the
3808 same memory state as that store.
3810 In this case the vdef versions for this store are value numbered to those
3811 vuse versions, since they represent the same memory state after
3812 this store.
3814 Otherwise, the vdefs for the store are used when inserting into
3815 the table, since the store generates a new memory state. */
3817 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3818 if (vnresult
3819 && vnresult->result)
3821 tree result = vnresult->result;
3822 if (TREE_CODE (result) == SSA_NAME)
3823 result = SSA_VAL (result);
3824 resultsame = expressions_equal_p (result, op);
3825 if (resultsame)
3827 /* If the TBAA state isn't compatible for downstream reads
3828 we cannot value-number the VDEFs the same. */
3829 alias_set_type set = get_alias_set (lhs);
3830 if (vnresult->set != set
3831 && ! alias_set_subset_of (set, vnresult->set))
3832 resultsame = false;
3836 if (!resultsame)
3838 /* Only perform the following when being called from PRE
3839 which embeds tail merging. */
3840 if (default_vn_walk_kind == VN_WALK)
3842 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3843 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3844 if (vnresult)
3846 VN_INFO (vdef)->use_processed = true;
3847 return set_ssa_val_to (vdef, vnresult->result_vdef);
3851 if (dump_file && (dump_flags & TDF_DETAILS))
3853 fprintf (dump_file, "No store match\n");
3854 fprintf (dump_file, "Value numbering store ");
3855 print_generic_expr (dump_file, lhs);
3856 fprintf (dump_file, " to ");
3857 print_generic_expr (dump_file, op);
3858 fprintf (dump_file, "\n");
3860 /* Have to set value numbers before insert, since insert is
3861 going to valueize the references in-place. */
3862 if (vdef)
3863 changed |= set_ssa_val_to (vdef, vdef);
3865 /* Do not insert structure copies into the tables. */
3866 if (is_gimple_min_invariant (op)
3867 || is_gimple_reg (op))
3868 vn_reference_insert (lhs, op, vdef, NULL);
3870 /* Only perform the following when being called from PRE
3871 which embeds tail merging. */
3872 if (default_vn_walk_kind == VN_WALK)
3874 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3875 vn_reference_insert (assign, lhs, vuse, vdef);
3878 else
3880 /* We had a match, so value number the vdef to have the value
3881 number of the vuse it came from. */
3883 if (dump_file && (dump_flags & TDF_DETAILS))
3884 fprintf (dump_file, "Store matched earlier value, "
3885 "value numbering store vdefs to matching vuses.\n");
3887 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3890 return changed;
3893 /* Visit and value number PHI, return true if the value number
3894 changed. */
3896 static bool
3897 visit_phi (gimple *phi)
3899 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3900 unsigned n_executable = 0;
3901 bool allsame = true;
3902 edge_iterator ei;
3903 edge e;
3905 /* TODO: We could check for this in init_sccvn, and replace this
3906 with a gcc_assert. */
3907 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3908 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3910 /* See if all non-TOP arguments have the same value. TOP is
3911 equivalent to everything, so we can ignore it. */
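/* For example x_3 = PHI <a_1(2), a_1(3)> with both incoming edges
   executable gets the value number of a_1 below, unless it is first
   found equal to an earlier PHI in this block via vn_phi_lookup.  */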
3912 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3913 if (e->flags & EDGE_EXECUTABLE)
3915 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3917 ++n_executable;
3918 if (TREE_CODE (def) == SSA_NAME)
3919 def = SSA_VAL (def);
3920 if (def == VN_TOP)
3922 /* Ignore undefined defs for sameval but record one. */
3923 else if (TREE_CODE (def) == SSA_NAME
3924 && ssa_undefined_value_p (def, false))
3925 seen_undef = def;
3926 else if (sameval == VN_TOP)
3927 sameval = def;
3928 else if (!expressions_equal_p (def, sameval))
3930 allsame = false;
3931 break;
3936 /* If none of the edges was executable keep the value-number at VN_TOP,
3937 if only a single edge is executable use its value. */
3938 if (n_executable <= 1)
3939 result = seen_undef ? seen_undef : sameval;
3940 /* If we saw only undefined values and VN_TOP use one of the
3941 undefined values. */
3942 else if (sameval == VN_TOP)
3943 result = seen_undef ? seen_undef : sameval;
3944 /* First see if it is equivalent to a phi node in this block. We prefer
3945 this as it allows IV elimination - see PRs 66502 and 67167. */
3946 else if ((result = vn_phi_lookup (phi)))
3948 /* If all values are the same use that, unless we've seen undefined
3949 values as well and the value isn't constant.
3950 CCP/copyprop have the same restriction to not remove uninit warnings. */
3951 else if (allsame
3952 && (! seen_undef || is_gimple_min_invariant (sameval)))
3953 result = sameval;
3954 else
3956 result = PHI_RESULT (phi);
3957 /* Only insert PHIs that are varying; for constant value numbers
3958 we would mess up equivalences otherwise, as we are only comparing
3959 the immediate controlling predicates. */
3960 vn_phi_insert (phi, result);
3963 return set_ssa_val_to (PHI_RESULT (phi), result);
3966 /* Try to simplify RHS using equivalences and constant folding. */
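/* For example, when the lattice records that b_2 has the value 0, an
   RHS like a_1 + b_2 folds to a_1 here; gimple_fold_stmt_to_constant_1
   consults the lattice through vn_valueize.  */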
3968 static tree
3969 try_to_simplify (gassign *stmt)
3971 enum tree_code code = gimple_assign_rhs_code (stmt);
3972 tree tem;
3974 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3975 in this case; there is no point in doing extra work. */
3976 if (code == SSA_NAME)
3977 return NULL_TREE;
3979 /* First try constant folding based on our current lattice. */
3980 mprts_hook = vn_lookup_simplify_result;
3981 mprts_hook_cnt = 9;
3982 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3983 mprts_hook = NULL;
3984 if (tem
3985 && (TREE_CODE (tem) == SSA_NAME
3986 || is_gimple_min_invariant (tem)))
3987 return tem;
3989 return NULL_TREE;
3992 /* Visit and value number USE, return true if the value number
3993 changed. */
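/* Dispatch overview: PHIs go to visit_phi, statements with volatile
   operands make their defs varying, assignments are handled as copies,
   simplified values, stores, loads or nary operations, and calls are
   value numbered via visit_reference_op_call only when they are
   pure/const or, for PRE's tail merging, when they clobber memory.  */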
3995 static bool
3996 visit_use (tree use)
3998 bool changed = false;
3999 gimple *stmt = SSA_NAME_DEF_STMT (use);
4001 mark_use_processed (use);
4003 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4004 && !SSA_NAME_IS_DEFAULT_DEF (use));
4006 if (dump_file && (dump_flags & TDF_DETAILS))
4008 fprintf (dump_file, "Value numbering ");
4009 print_generic_expr (dump_file, use);
4010 fprintf (dump_file, " stmt = ");
4011 print_gimple_stmt (dump_file, stmt, 0);
4014 if (gimple_code (stmt) == GIMPLE_PHI)
4015 changed = visit_phi (stmt);
4016 else if (gimple_has_volatile_ops (stmt))
4017 changed = defs_to_varying (stmt);
4018 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4020 enum tree_code code = gimple_assign_rhs_code (ass);
4021 tree lhs = gimple_assign_lhs (ass);
4022 tree rhs1 = gimple_assign_rhs1 (ass);
4023 tree simplified;
4025 /* Shortcut for copies. Simplifying copies is pointless,
4026 since we copy the expression and value they represent. */
4027 if (code == SSA_NAME
4028 && TREE_CODE (lhs) == SSA_NAME)
4030 changed = visit_copy (lhs, rhs1);
4031 goto done;
4033 simplified = try_to_simplify (ass);
4034 if (simplified)
4036 if (dump_file && (dump_flags & TDF_DETAILS))
4038 fprintf (dump_file, "RHS ");
4039 print_gimple_expr (dump_file, ass, 0);
4040 fprintf (dump_file, " simplified to ");
4041 print_generic_expr (dump_file, simplified);
4042 fprintf (dump_file, "\n");
4045 /* Setting value numbers to constants will occasionally
4046 screw up phi congruence because constants are not
4047 uniquely associated with a single ssa name that can be
4048 looked up. */
4049 if (simplified
4050 && is_gimple_min_invariant (simplified)
4051 && TREE_CODE (lhs) == SSA_NAME)
4053 changed = set_ssa_val_to (lhs, simplified);
4054 goto done;
4056 else if (simplified
4057 && TREE_CODE (simplified) == SSA_NAME
4058 && TREE_CODE (lhs) == SSA_NAME)
4060 changed = visit_copy (lhs, simplified);
4061 goto done;
4064 if ((TREE_CODE (lhs) == SSA_NAME
4065 /* We can substitute SSA_NAMEs that are live over
4066 abnormal edges with their constant value. */
4067 && !(gimple_assign_copy_p (ass)
4068 && is_gimple_min_invariant (rhs1))
4069 && !(simplified
4070 && is_gimple_min_invariant (simplified))
4071 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4072 /* Stores or copies from SSA_NAMEs that are live over
4073 abnormal edges are a problem. */
4074 || (code == SSA_NAME
4075 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4076 changed = defs_to_varying (ass);
4077 else if (REFERENCE_CLASS_P (lhs)
4078 || DECL_P (lhs))
4079 changed = visit_reference_op_store (lhs, rhs1, ass);
4080 else if (TREE_CODE (lhs) == SSA_NAME)
4082 if ((gimple_assign_copy_p (ass)
4083 && is_gimple_min_invariant (rhs1))
4084 || (simplified
4085 && is_gimple_min_invariant (simplified)))
4087 if (simplified)
4088 changed = set_ssa_val_to (lhs, simplified);
4089 else
4090 changed = set_ssa_val_to (lhs, rhs1);
4092 else
4094 /* Visit the original statement. */
4095 switch (vn_get_stmt_kind (ass))
4097 case VN_NARY:
4098 changed = visit_nary_op (lhs, ass);
4099 break;
4100 case VN_REFERENCE:
4101 changed = visit_reference_op_load (lhs, rhs1, ass);
4102 break;
4103 default:
4104 changed = defs_to_varying (ass);
4105 break;
4109 else
4110 changed = defs_to_varying (ass);
4112 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4114 tree lhs = gimple_call_lhs (call_stmt);
4115 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4117 /* Try constant folding based on our current lattice. */
4118 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4119 vn_valueize);
4120 if (simplified)
4122 if (dump_file && (dump_flags & TDF_DETAILS))
4124 fprintf (dump_file, "call ");
4125 print_gimple_expr (dump_file, call_stmt, 0);
4126 fprintf (dump_file, " simplified to ");
4127 print_generic_expr (dump_file, simplified);
4128 fprintf (dump_file, "\n");
4131 /* Setting value numbers to constants will occasionally
4132 screw up phi congruence because constants are not
4133 uniquely associated with a single ssa name that can be
4134 looked up. */
4135 if (simplified
4136 && is_gimple_min_invariant (simplified))
4138 changed = set_ssa_val_to (lhs, simplified);
4139 if (gimple_vdef (call_stmt))
4140 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4141 SSA_VAL (gimple_vuse (call_stmt)));
4142 goto done;
4144 else if (simplified
4145 && TREE_CODE (simplified) == SSA_NAME)
4147 changed = visit_copy (lhs, simplified);
4148 if (gimple_vdef (call_stmt))
4149 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4150 SSA_VAL (gimple_vuse (call_stmt)));
4151 goto done;
4153 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4155 changed = defs_to_varying (call_stmt);
4156 goto done;
4160 /* Pick up flags from a devirtualization target. */
4161 tree fn = gimple_call_fn (stmt);
4162 int extra_fnflags = 0;
4163 if (fn && TREE_CODE (fn) == SSA_NAME)
4165 fn = SSA_VAL (fn);
4166 if (TREE_CODE (fn) == ADDR_EXPR
4167 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4168 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4170 if (!gimple_call_internal_p (call_stmt)
4171 && (/* Calls to the same function with the same vuse
4172 and the same operands do not necessarily return the same
4173 value, unless they're pure or const. */
4174 ((gimple_call_flags (call_stmt) | extra_fnflags)
4175 & (ECF_PURE | ECF_CONST))
4176 /* If calls have a vdef, subsequent calls won't have
4177 the same incoming vuse. So, if 2 calls with vdef have the
4178 same vuse, we know they're not subsequent.
4179 We can give two calls to the same function with the same
4180 vuse and the same operands (and which therefore are not
4181 subsequent) the same value number, because there is no code
4182 in the program that can compare the two values... */
4183 || (gimple_vdef (call_stmt)
4184 /* ... unless the call returns a pointer which does
4185 not alias with anything else, in which case the
4186 information that the values are distinct is encoded
4187 in the IL. */
4188 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4189 /* Only perform the following when being called from PRE
4190 which embeds tail merging. */
4191 && default_vn_walk_kind == VN_WALK)))
4192 changed = visit_reference_op_call (lhs, call_stmt);
4193 else
4194 changed = defs_to_varying (call_stmt);
4196 else
4197 changed = defs_to_varying (stmt);
4198 done:
4199 return changed;
4202 /* Compare two operands by reverse postorder index */
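/* Ordering the members this way (default defs first, PHIs before other
   statements of the same block, otherwise by statement uid or by the
   RPO number of the containing block) lets process_scc visit most defs
   before their uses, which typically reduces the number of iterations
   needed to converge.  */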
4204 static int
4205 compare_ops (const void *pa, const void *pb)
4207 const tree opa = *((const tree *)pa);
4208 const tree opb = *((const tree *)pb);
4209 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4210 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4211 basic_block bba;
4212 basic_block bbb;
4214 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4215 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4216 else if (gimple_nop_p (opstmta))
4217 return -1;
4218 else if (gimple_nop_p (opstmtb))
4219 return 1;
4221 bba = gimple_bb (opstmta);
4222 bbb = gimple_bb (opstmtb);
4224 if (!bba && !bbb)
4225 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4226 else if (!bba)
4227 return -1;
4228 else if (!bbb)
4229 return 1;
4231 if (bba == bbb)
4233 if (gimple_code (opstmta) == GIMPLE_PHI
4234 && gimple_code (opstmtb) == GIMPLE_PHI)
4235 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4236 else if (gimple_code (opstmta) == GIMPLE_PHI)
4237 return -1;
4238 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4239 return 1;
4240 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4241 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4242 else
4243 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4245 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4248 /* Sort an array containing members of a strongly connected component
4249 SCC so that the members are ordered by RPO number.
4250 This means that when the sort is complete, iterating through the
4251 array will give you the members in RPO order. */
4253 static void
4254 sort_scc (vec<tree> scc)
4256 scc.qsort (compare_ops);
4259 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
4261 static void
4262 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4264 size_t size = sizeof_vn_nary_op (onary->length);
4265 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4266 &info->nary_obstack);
4267 memcpy (nary, onary, size);
4268 vn_nary_op_insert_into (nary, info->nary, false);
4271 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
4273 static void
4274 copy_phi (vn_phi_t ophi, vn_tables_t info)
4276 vn_phi_t phi = info->phis_pool->allocate ();
4277 vn_phi_s **slot;
4278 memcpy (phi, ophi, sizeof (*phi));
4279 ophi->phiargs.create (0);
4280 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4281 gcc_assert (!*slot);
4282 *slot = phi;
4285 /* Insert the no longer used reference OREF into the hash tables of INFO. */
4287 static void
4288 copy_reference (vn_reference_t oref, vn_tables_t info)
4290 vn_reference_t ref;
4291 vn_reference_s **slot;
4292 ref = info->references_pool->allocate ();
4293 memcpy (ref, oref, sizeof (*ref));
4294 oref->operands.create (0);
4295 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4296 if (*slot)
4297 free_reference (*slot);
4298 *slot = ref;
4301 /* Process a strongly connected component in the SSA graph. */
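/* The SCC is iterated with a separate optimistic hash table that is
   cleared before every iteration; only after the value numbers stop
   changing are the surviving entries copied into the valid table, so
   nothing provisional ever ends up there.  */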
4303 static void
4304 process_scc (vec<tree> scc)
4306 tree var;
4307 unsigned int i;
4308 unsigned int iterations = 0;
4309 bool changed = true;
4310 vn_nary_op_iterator_type hin;
4311 vn_phi_iterator_type hip;
4312 vn_reference_iterator_type hir;
4313 vn_nary_op_t nary;
4314 vn_phi_t phi;
4315 vn_reference_t ref;
4317 /* If the SCC has a single member, just visit it. */
4318 if (scc.length () == 1)
4320 tree use = scc[0];
4321 if (VN_INFO (use)->use_processed)
4322 return;
4323 /* We need to make sure it doesn't form a cycle itself, which can
4324 happen for self-referential PHI nodes. In that case we would
4325 end up inserting an expression with VN_TOP operands into the
4326 valid table which makes us derive bogus equivalences later.
4327 The cheapest way to check this is to assume it for all PHI nodes. */
4328 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4329 /* Fallthru to iteration. */ ;
4330 else
4332 visit_use (use);
4333 return;
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4338 print_scc (dump_file, scc);
4340 /* Iterate over the SCC with the optimistic table until it stops
4341 changing. */
4342 current_info = optimistic_info;
4343 while (changed)
4345 changed = false;
4346 iterations++;
4347 if (dump_file && (dump_flags & TDF_DETAILS))
4348 fprintf (dump_file, "Starting iteration %d\n", iterations);
4349 /* As we are value-numbering optimistically we have to
4350 clear the expression tables and the simplified expressions
4351 in each iteration until we converge. */
4352 optimistic_info->nary->empty ();
4353 optimistic_info->phis->empty ();
4354 optimistic_info->references->empty ();
4355 obstack_free (&optimistic_info->nary_obstack, NULL);
4356 gcc_obstack_init (&optimistic_info->nary_obstack);
4357 optimistic_info->phis_pool->release ();
4358 optimistic_info->references_pool->release ();
4359 FOR_EACH_VEC_ELT (scc, i, var)
4360 gcc_assert (!VN_INFO (var)->needs_insertion
4361 && VN_INFO (var)->expr == NULL);
4362 FOR_EACH_VEC_ELT (scc, i, var)
4363 changed |= visit_use (var);
4366 if (dump_file && (dump_flags & TDF_DETAILS))
4367 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4368 statistics_histogram_event (cfun, "SCC iterations", iterations);
4370 /* Finally, copy the contents of the no longer used optimistic
4371 table to the valid table. */
4372 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4373 copy_nary (nary, valid_info);
4374 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4375 copy_phi (phi, valid_info);
4376 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4377 ref, vn_reference_t, hir)
4378 copy_reference (ref, valid_info);
4380 current_info = valid_info;
4384 /* Pop the components of the found SCC for NAME off the SCC stack
4385 and process them. If the SCC exceeds the size limit its members
4386 are dropped to varying instead of being iterated. */
4388 static void
4389 extract_and_process_scc_for_name (tree name)
4391 auto_vec<tree> scc;
4392 tree x;
4394 /* Found an SCC, pop the components off the SCC stack and
4395 process them. */
4398 x = sccstack.pop ();
4400 VN_INFO (x)->on_sccstack = false;
4401 scc.safe_push (x);
4402 } while (x != name);
4404 /* Drop all defs in the SCC to varying if the SCC turns out to be
4405 incredibly large.
4406 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4407 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4409 if (dump_file)
4411 print_scc (dump_file, scc);
4412 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4413 "size %u exceeding %u\n", scc.length (),
4414 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4416 tree var;
4417 unsigned i;
4418 FOR_EACH_VEC_ELT (scc, i, var)
4420 gimple *def = SSA_NAME_DEF_STMT (var);
4421 mark_use_processed (var);
4422 if (SSA_NAME_IS_DEFAULT_DEF (var)
4423 || gimple_code (def) == GIMPLE_PHI)
4424 set_ssa_val_to (var, var);
4425 else
4426 defs_to_varying (def);
4428 return;
4431 if (scc.length () > 1)
4432 sort_scc (scc);
4434 process_scc (scc);
4437 /* Depth first search on NAME to discover and process SCC's in the SSA
4438 graph.
4439 Execution of this algorithm relies on the fact that the SCC's are
4440 popped off the stack in topological order.
4441 SCCs that exceed the size limit are not iterated; their members are
4442 dropped to varying by extract_and_process_scc_for_name. */
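/* This is Tarjan's SCC algorithm, implemented iteratively with explicit
   stacks (namevec/itervec) instead of recursion; dfsnum and low play
   their usual roles and an SCC rooted at NAME is found when
   low == dfsnum.  */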
4444 static void
4445 DFS (tree name)
4447 auto_vec<ssa_op_iter> itervec;
4448 auto_vec<tree> namevec;
4449 use_operand_p usep = NULL;
4450 gimple *defstmt;
4451 tree use;
4452 ssa_op_iter iter;
4454 start_over:
4455 /* SCC info */
4456 VN_INFO (name)->dfsnum = next_dfs_num++;
4457 VN_INFO (name)->visited = true;
4458 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4460 sccstack.safe_push (name);
4461 VN_INFO (name)->on_sccstack = true;
4462 defstmt = SSA_NAME_DEF_STMT (name);
4464 /* Recursively DFS on our operands, looking for SCC's. */
4465 if (!gimple_nop_p (defstmt))
4467 /* Push a new iterator. */
4468 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4469 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4470 else
4471 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4473 else
4474 clear_and_done_ssa_iter (&iter);
4476 while (1)
4478 /* If we are done processing uses of a name, go up the stack
4479 of iterators and process SCCs as we found them. */
4480 if (op_iter_done (&iter))
4482 /* See if we found an SCC. */
4483 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4484 extract_and_process_scc_for_name (name);
4486 /* Check if we are done. */
4487 if (namevec.is_empty ())
4488 return;
4490 /* Restore the last use walker and continue walking there. */
4491 use = name;
4492 name = namevec.pop ();
4493 memcpy (&iter, &itervec.last (),
4494 sizeof (ssa_op_iter));
4495 itervec.pop ();
4496 goto continue_walking;
4499 use = USE_FROM_PTR (usep);
4501 /* Since we handle phi nodes, we will sometimes get
4502 invariants in the use expression. */
4503 if (TREE_CODE (use) == SSA_NAME)
4505 if (! (VN_INFO (use)->visited))
4507 /* Recurse by pushing the current use walking state on
4508 the stack and starting over. */
4509 itervec.safe_push (iter);
4510 namevec.safe_push (name);
4511 name = use;
4512 goto start_over;
4514 continue_walking:
4515 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4516 VN_INFO (use)->low);
4518 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4519 && VN_INFO (use)->on_sccstack)
4521 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4522 VN_INFO (name)->low);
4526 usep = op_iter_next_use (&iter);
4530 /* Allocate a value number table. */
4532 static void
4533 allocate_vn_table (vn_tables_t table)
4535 table->phis = new vn_phi_table_type (23);
4536 table->nary = new vn_nary_op_table_type (23);
4537 table->references = new vn_reference_table_type (23);
4539 gcc_obstack_init (&table->nary_obstack);
4540 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4541 table->references_pool = new object_allocator<vn_reference_s>
4542 ("VN references");
4545 /* Free a value number table. */
4547 static void
4548 free_vn_table (vn_tables_t table)
4550 delete table->phis;
4551 table->phis = NULL;
4552 delete table->nary;
4553 table->nary = NULL;
4554 delete table->references;
4555 table->references = NULL;
4556 obstack_free (&table->nary_obstack, NULL);
4557 delete table->phis_pool;
4558 delete table->references_pool;
4561 static void
4562 init_scc_vn (void)
4564 int j;
4565 int *rpo_numbers_temp;
4567 calculate_dominance_info (CDI_DOMINATORS);
4568 mark_dfs_back_edges ();
4570 sccstack.create (0);
4571 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4573 constant_value_ids = BITMAP_ALLOC (NULL);
4575 next_dfs_num = 1;
4576 next_value_id = 1;
4578 vn_ssa_aux_table.create (num_ssa_names + 1);
4579 /* The create above doesn't actually grow the table to the right size,
4580 it just preallocates the space to do so. */
4581 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4582 gcc_obstack_init (&vn_ssa_aux_obstack);
4584 shared_lookup_phiargs.create (0);
4585 shared_lookup_references.create (0);
4586 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4587 rpo_numbers_temp =
4588 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4589 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4591 /* rpo_numbers_temp is in RPO order: rpo_numbers_temp[i] = bb->index
4592 means that the i'th block in RPO order is bb. We want to map block
4593 indices to RPO numbers, so we invert it into rpo_numbers. */
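/* E.g. if rpo_numbers_temp is { 2, 4, 3 } then block 2 gets RPO number
   0, block 4 gets 1 and block 3 gets 2.  */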
4594 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4595 rpo_numbers[rpo_numbers_temp[j]] = j;
4597 XDELETE (rpo_numbers_temp);
4599 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4600 get_identifier ("VN_TOP"), error_mark_node);
4602 renumber_gimple_stmt_uids ();
4604 /* Create the valid and optimistic value numbering tables. */
4605 valid_info = XCNEW (struct vn_tables_s);
4606 allocate_vn_table (valid_info);
4607 optimistic_info = XCNEW (struct vn_tables_s);
4608 allocate_vn_table (optimistic_info);
4609 current_info = valid_info;
4611 /* Create the VN_INFO structures, and initialize value numbers to
4612 TOP or VARYING for parameters. */
4613 size_t i;
4614 tree name;
4616 FOR_EACH_SSA_NAME (i, name, cfun)
4618 VN_INFO_GET (name)->valnum = VN_TOP;
4619 VN_INFO (name)->needs_insertion = false;
4620 VN_INFO (name)->expr = NULL;
4621 VN_INFO (name)->value_id = 0;
4623 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4624 continue;
4626 switch (TREE_CODE (SSA_NAME_VAR (name)))
4628 case VAR_DECL:
4629 /* All undefined vars are VARYING. */
4630 VN_INFO (name)->valnum = name;
4631 VN_INFO (name)->visited = true;
4632 break;
4634 case PARM_DECL:
4635 /* Parameters are VARYING but we can record a condition
4636 if we know it is a non-NULL pointer. */
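/* The NE_EXPR record below lets a later lookup of "name != 0", for
   example from a GIMPLE_COND testing the parameter against NULL,
   find boolean_true_node and resolve the branch.  */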
4637 VN_INFO (name)->visited = true;
4638 VN_INFO (name)->valnum = name;
4639 if (POINTER_TYPE_P (TREE_TYPE (name))
4640 && nonnull_arg_p (SSA_NAME_VAR (name)))
4642 tree ops[2];
4643 ops[0] = name;
4644 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4645 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4646 boolean_true_node, 0);
4647 if (dump_file && (dump_flags & TDF_DETAILS))
4649 fprintf (dump_file, "Recording ");
4650 print_generic_expr (dump_file, name, TDF_SLIM);
4651 fprintf (dump_file, " != 0\n");
4654 break;
4656 case RESULT_DECL:
4657 /* If the result is passed by invisible reference the default
4658 def is initialized, otherwise it's uninitialized. Either way
4659 it is treated as varying here. */
4660 VN_INFO (name)->visited = true;
4661 VN_INFO (name)->valnum = name;
4662 break;
4664 default:
4665 gcc_unreachable ();
4670 /* Restore SSA info that has been reset on value leaders. */
4672 void
4673 scc_vn_restore_ssa_info (void)
4675 unsigned i;
4676 tree name;
4678 FOR_EACH_SSA_NAME (i, name, cfun)
4680 if (has_VN_INFO (name))
4682 if (VN_INFO (name)->needs_insertion)
4684 else if (POINTER_TYPE_P (TREE_TYPE (name))
4685 && VN_INFO (name)->info.ptr_info)
4686 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4687 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4688 && VN_INFO (name)->info.range_info)
4690 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4691 SSA_NAME_ANTI_RANGE_P (name)
4692 = VN_INFO (name)->range_info_anti_range_p;
4698 void
4699 free_scc_vn (void)
4701 size_t i;
4702 tree name;
4704 delete constant_to_value_id;
4705 constant_to_value_id = NULL;
4706 BITMAP_FREE (constant_value_ids);
4707 shared_lookup_phiargs.release ();
4708 shared_lookup_references.release ();
4709 XDELETEVEC (rpo_numbers);
4711 FOR_EACH_SSA_NAME (i, name, cfun)
4713 if (has_VN_INFO (name)
4714 && VN_INFO (name)->needs_insertion)
4715 release_ssa_name (name);
4717 obstack_free (&vn_ssa_aux_obstack, NULL);
4718 vn_ssa_aux_table.release ();
4720 sccstack.release ();
4721 free_vn_table (valid_info);
4722 XDELETE (valid_info);
4723 free_vn_table (optimistic_info);
4724 XDELETE (optimistic_info);
4726 BITMAP_FREE (const_parms);
4729 /* Set *ID according to RESULT. */
4731 static void
4732 set_value_id_for_result (tree result, unsigned int *id)
4734 if (result && TREE_CODE (result) == SSA_NAME)
4735 *id = VN_INFO (result)->value_id;
4736 else if (result && is_gimple_min_invariant (result))
4737 *id = get_or_alloc_constant_value_id (result);
4738 else
4739 *id = get_next_value_id ();
4742 /* Set the value ids in the valid hash tables. */
4744 static void
4745 set_hashtable_value_ids (void)
4747 vn_nary_op_iterator_type hin;
4748 vn_phi_iterator_type hip;
4749 vn_reference_iterator_type hir;
4750 vn_nary_op_t vno;
4751 vn_reference_t vr;
4752 vn_phi_t vp;
4754 /* Now set the value ids of the things we had put in the hash
4755 table. */
4757 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4758 set_value_id_for_result (vno->result, &vno->value_id);
4760 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4761 set_value_id_for_result (vp->result, &vp->value_id);
4763 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4764 hir)
4765 set_value_id_for_result (vr->result, &vr->value_id);
4768 class sccvn_dom_walker : public dom_walker
4770 public:
4771 sccvn_dom_walker ()
4772 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4774 virtual edge before_dom_children (basic_block);
4775 virtual void after_dom_children (basic_block);
4777 void record_cond (basic_block,
4778 enum tree_code code, tree lhs, tree rhs, bool value);
4779 void record_conds (basic_block,
4780 enum tree_code code, tree lhs, tree rhs, bool value);
4782 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4783 cond_stack;
4786 /* Record a temporary condition for the BB and its dominated blocks. */
4788 void
4789 sccvn_dom_walker::record_cond (basic_block bb,
4790 enum tree_code code, tree lhs, tree rhs,
4791 bool value)
4793 tree ops[2] = { lhs, rhs };
4794 vn_nary_op_t old = NULL;
4795 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4796 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4797 vn_nary_op_t cond
4798 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4799 value
4800 ? boolean_true_node
4801 : boolean_false_node, 0);
4802 if (dump_file && (dump_flags & TDF_DETAILS))
4804 fprintf (dump_file, "Recording temporarily ");
4805 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4806 fprintf (dump_file, " %s ", get_tree_code_name (code));
4807 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4808 fprintf (dump_file, " == %s%s\n",
4809 value ? "true" : "false",
4810 old ? " (old entry saved)" : "");
4812 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4815 /* Record temporary conditions for the BB and its dominated blocks
4816 according to LHS CODE RHS == VALUE and its dominated conditions. */
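/* For example, recording "a < b == true" below also records
   a <= b == true, a != b == true and a == b == false, so blocks
   dominated by the condition can resolve those derived comparisons
   directly.  */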
4818 void
4819 sccvn_dom_walker::record_conds (basic_block bb,
4820 enum tree_code code, tree lhs, tree rhs,
4821 bool value)
4823 /* Record the original condition. */
4824 record_cond (bb, code, lhs, rhs, value);
4826 if (!value)
4827 return;
4829 /* Record dominated conditions if the condition is true. Note that
4830 the inversion is already recorded. */
4831 switch (code)
4833 case LT_EXPR:
4834 case GT_EXPR:
4835 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4836 record_cond (bb, NE_EXPR, lhs, rhs, true);
4837 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4838 break;
4840 case EQ_EXPR:
4841 record_cond (bb, LE_EXPR, lhs, rhs, true);
4842 record_cond (bb, GE_EXPR, lhs, rhs, true);
4843 record_cond (bb, LT_EXPR, lhs, rhs, false);
4844 record_cond (bb, GT_EXPR, lhs, rhs, false);
4845 break;
4847 default:
4848 break;
4852 /* Restore expressions and values derived from conditionals. */
4854 void
4855 sccvn_dom_walker::after_dom_children (basic_block bb)
4857 while (!cond_stack.is_empty ()
4858 && cond_stack.last ().first == bb)
4860 vn_nary_op_t cond = cond_stack.last ().second.first;
4861 vn_nary_op_t old = cond_stack.last ().second.second;
4862 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4863 if (old)
4864 vn_nary_op_insert_into (old, current_info->nary, false);
4865 cond_stack.pop ();
4869 /* Value number all statements in BB. */
4871 edge
4872 sccvn_dom_walker::before_dom_children (basic_block bb)
4874 if (dump_file && (dump_flags & TDF_DETAILS))
4875 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4877 /* If we have a single predecessor record the equivalence from a
4878 possible condition on the predecessor edge. */
4879 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4880 if (pred_e)
4882 /* Check if there are multiple executable successor edges in
4883 the source block. Otherwise there is no additional info
4884 to be recorded. */
4885 edge_iterator ei;
4886 edge e2;
4887 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4888 if (e2 != pred_e
4889 && e2->flags & EDGE_EXECUTABLE)
4890 break;
4891 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4893 gimple *stmt = last_stmt (pred_e->src);
4894 if (stmt
4895 && gimple_code (stmt) == GIMPLE_COND)
4897 enum tree_code code = gimple_cond_code (stmt);
4898 tree lhs = gimple_cond_lhs (stmt);
4899 tree rhs = gimple_cond_rhs (stmt);
4900 record_conds (bb, code, lhs, rhs,
4901 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4902 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4903 if (code != ERROR_MARK)
4904 record_conds (bb, code, lhs, rhs,
4905 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4910 /* Value-number all defs in the basic-block. */
4911 for (gphi_iterator gsi = gsi_start_phis (bb);
4912 !gsi_end_p (gsi); gsi_next (&gsi))
4914 gphi *phi = gsi.phi ();
4915 tree res = PHI_RESULT (phi);
4916 if (!VN_INFO (res)->visited)
4917 DFS (res);
4919 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4920 !gsi_end_p (gsi); gsi_next (&gsi))
4922 ssa_op_iter i;
4923 tree op;
4924 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4925 if (!VN_INFO (op)->visited)
4926 DFS (op);
4929 /* Finally look at the last stmt. */
4930 gimple *stmt = last_stmt (bb);
4931 if (!stmt)
4932 return NULL;
4934 enum gimple_code code = gimple_code (stmt);
4935 if (code != GIMPLE_COND
4936 && code != GIMPLE_SWITCH
4937 && code != GIMPLE_GOTO)
4938 return NULL;
4940 if (dump_file && (dump_flags & TDF_DETAILS))
4942 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4943 print_gimple_stmt (dump_file, stmt, 0);
4946 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4947 if value-numbering can prove they are not reachable. Handling
4948 computed gotos is also possible. */
4949 tree val;
4950 switch (code)
4952 case GIMPLE_COND:
4954 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4955 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4956 val = gimple_simplify (gimple_cond_code (stmt),
4957 boolean_type_node, lhs, rhs,
4958 NULL, vn_valueize);
4959 /* If that didn't simplify to a constant see if we have recorded
4960 temporary expressions from taken edges. */
4961 if (!val || TREE_CODE (val) != INTEGER_CST)
4963 tree ops[2];
4964 ops[0] = lhs;
4965 ops[1] = rhs;
4966 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4967 boolean_type_node, ops, NULL);
4969 break;
4971 case GIMPLE_SWITCH:
4972 val = gimple_switch_index (as_a <gswitch *> (stmt));
4973 break;
4974 case GIMPLE_GOTO:
4975 val = gimple_goto_dest (stmt);
4976 break;
4977 default:
4978 gcc_unreachable ();
4980 if (!val)
4981 return NULL;
4983 edge taken = find_taken_edge (bb, vn_valueize (val));
4984 if (!taken)
4985 return NULL;
4987 if (dump_file && (dump_flags & TDF_DETAILS))
4988 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4989 "not executable\n", bb->index, bb->index, taken->dest->index);
4991 return taken;
4994 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4995 oracle walking during the VN process. Oversized SCCs are dropped
4996 to varying instead of making the whole pass bail out. */
4998 void
4999 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5001 size_t i;
5003 default_vn_walk_kind = default_vn_walk_kind_;
5005 init_scc_vn ();
5007 /* Collect pointers we know point to readonly memory. */
5008 const_parms = BITMAP_ALLOC (NULL);
5009 tree fnspec = lookup_attribute ("fn spec",
5010 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5011 if (fnspec)
5013 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5014 i = 1;
5015 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5016 arg; arg = DECL_CHAIN (arg), ++i)
5018 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5019 break;
5020 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5021 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5023 tree name = ssa_default_def (cfun, arg);
5024 if (name)
5025 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5030 /* Walk all blocks in dominator order, value-numbering stmts
5031 SSA defs and decide whether outgoing edges are not executable. */
5032 sccvn_dom_walker walker;
5033 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5035 /* Initialize the value ids and prune out remaining VN_TOPs
5036 from dead code. */
5037 tree name;
5038 FOR_EACH_SSA_NAME (i, name, cfun)
5040 vn_ssa_aux_t info = VN_INFO (name);
5041 if (!info->visited
5042 || info->valnum == VN_TOP)
5043 info->valnum = name;
5044 if (info->valnum == name)
5045 info->value_id = get_next_value_id ();
5046 else if (is_gimple_min_invariant (info->valnum))
5047 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5050 /* Propagate. */
5051 FOR_EACH_SSA_NAME (i, name, cfun)
5053 vn_ssa_aux_t info = VN_INFO (name);
5054 if (TREE_CODE (info->valnum) == SSA_NAME
5055 && info->valnum != name
5056 && info->value_id != VN_INFO (info->valnum)->value_id)
5057 info->value_id = VN_INFO (info->valnum)->value_id;
5060 set_hashtable_value_ids ();
5062 if (dump_file && (dump_flags & TDF_DETAILS))
5064 fprintf (dump_file, "Value numbers:\n");
5065 FOR_EACH_SSA_NAME (i, name, cfun)
5067 if (VN_INFO (name)->visited
5068 && SSA_VAL (name) != name)
5070 print_generic_expr (dump_file, name);
5071 fprintf (dump_file, " = ");
5072 print_generic_expr (dump_file, SSA_VAL (name));
5073 fprintf (dump_file, "\n");
5079 /* Return the maximum value id we have ever seen. */
5081 unsigned int
5082 get_max_value_id (void)
5084 return next_value_id;
5087 /* Return the next unique value id. */
5089 unsigned int
5090 get_next_value_id (void)
5092 return next_value_id++;
5096 /* Compare two expressions E1 and E2 and return true if they are equal. */
5098 bool
5099 expressions_equal_p (tree e1, tree e2)
5101 /* The obvious case. */
5102 if (e1 == e2)
5103 return true;
5105 /* If either one is VN_TOP consider them equal. */
5106 if (e1 == VN_TOP || e2 == VN_TOP)
5107 return true;
5109 /* If only one of them is null, they cannot be equal. */
5110 if (!e1 || !e2)
5111 return false;
5113 /* Now perform the actual comparison. */
5114 if (TREE_CODE (e1) == TREE_CODE (e2)
5115 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5116 return true;
5118 return false;
5122 /* Return true if the nary operation NARY may trap. This is a copy
5123 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5125 bool
5126 vn_nary_may_trap (vn_nary_op_t nary)
5128 tree type;
5129 tree rhs2 = NULL_TREE;
5130 bool honor_nans = false;
5131 bool honor_snans = false;
5132 bool fp_operation = false;
5133 bool honor_trapv = false;
5134 bool handled, ret;
5135 unsigned i;
5137 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5138 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5139 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5141 type = nary->type;
5142 fp_operation = FLOAT_TYPE_P (type);
5143 if (fp_operation)
5145 honor_nans = flag_trapping_math && !flag_finite_math_only;
5146 honor_snans = flag_signaling_nans != 0;
5148 else if (INTEGRAL_TYPE_P (type)
5149 && TYPE_OVERFLOW_TRAPS (type))
5150 honor_trapv = true;
5152 if (nary->length >= 2)
5153 rhs2 = nary->op[1];
5154 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5155 honor_trapv,
5156 honor_nans, honor_snans, rhs2,
5157 &handled);
5158 if (handled
5159 && ret)
5160 return true;
5162 for (i = 0; i < nary->length; ++i)
5163 if (tree_could_trap_p (nary->op[i]))
5164 return true;
5166 return false;
5170 class eliminate_dom_walker : public dom_walker
5172 public:
5173 eliminate_dom_walker (cdi_direction, bitmap);
5174 ~eliminate_dom_walker ();
5176 virtual edge before_dom_children (basic_block);
5177 virtual void after_dom_children (basic_block);
5179 tree eliminate_avail (tree op);
5180 void eliminate_push_avail (tree op);
5181 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5183 bool do_pre;
5184 unsigned int el_todo;
5185 unsigned int eliminations;
5186 unsigned int insertions;
5188 /* SSA names that had their defs inserted by PRE if do_pre. */
5189 bitmap inserted_exprs;
5191 /* Blocks with statements that have had their EH properties changed. */
5192 bitmap need_eh_cleanup;
5194 /* Blocks with statements that have had their AB properties changed. */
5195 bitmap need_ab_cleanup;
5197 auto_vec<gimple *> to_remove;
5198 auto_vec<gimple *> to_fixup;
5199 auto_vec<tree> avail;
5200 auto_vec<tree> avail_stack;
5203 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5204 bitmap inserted_exprs_)
5205 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5206 el_todo (0), eliminations (0), insertions (0),
5207 inserted_exprs (inserted_exprs_)
5209 need_eh_cleanup = BITMAP_ALLOC (NULL);
5210 need_ab_cleanup = BITMAP_ALLOC (NULL);
5213 eliminate_dom_walker::~eliminate_dom_walker ()
5215 BITMAP_FREE (need_eh_cleanup);
5216 BITMAP_FREE (need_ab_cleanup);
5219 /* Return a leader for OP that is available at the current point of the
5220 eliminate domwalk. */
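/* Availability is tracked per value: avail[] is indexed by the SSA
   version of the value number and holds the current leader, while
   avail_stack records shadowed entries (separated by NULL_TREE block
   markers) so after_dom_children can undo them when leaving a block.  */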
5222 tree
5223 eliminate_dom_walker::eliminate_avail (tree op)
5225 tree valnum = VN_INFO (op)->valnum;
5226 if (TREE_CODE (valnum) == SSA_NAME)
5228 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5229 return valnum;
5230 if (avail.length () > SSA_NAME_VERSION (valnum))
5231 return avail[SSA_NAME_VERSION (valnum)];
5233 else if (is_gimple_min_invariant (valnum))
5234 return valnum;
5235 return NULL_TREE;
5238 /* At the current point of the eliminate domwalk make OP available. */
5240 void
5241 eliminate_dom_walker::eliminate_push_avail (tree op)
5243 tree valnum = VN_INFO (op)->valnum;
5244 if (TREE_CODE (valnum) == SSA_NAME)
5246 if (avail.length () <= SSA_NAME_VERSION (valnum))
5247 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5248 tree pushop = op;
5249 if (avail[SSA_NAME_VERSION (valnum)])
5250 pushop = avail[SSA_NAME_VERSION (valnum)];
5251 avail_stack.safe_push (pushop);
5252 avail[SSA_NAME_VERSION (valnum)] = op;
5256 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5257 the leader for the expression if insertion was successful. */
5259 tree
5260 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5262 /* We can insert a sequence with a single assignment only. */
5263 gimple_seq stmts = VN_INFO (val)->expr;
5264 if (!gimple_seq_singleton_p (stmts))
5265 return NULL_TREE;
5266 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5267 if (!stmt
5268 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5269 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5270 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5271 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5272 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5273 return NULL_TREE;
5275 tree op = gimple_assign_rhs1 (stmt);
5276 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5277 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5278 op = TREE_OPERAND (op, 0);
5279 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5280 if (!leader)
5281 return NULL_TREE;
5283 tree res;
5284 stmts = NULL;
5285 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5286 res = gimple_build (&stmts, BIT_FIELD_REF,
5287 TREE_TYPE (val), leader,
5288 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5289 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5290 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5291 res = gimple_build (&stmts, BIT_AND_EXPR,
5292 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5293 else
5294 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5295 TREE_TYPE (val), leader);
5296 if (TREE_CODE (res) != SSA_NAME
5297 || SSA_NAME_IS_DEFAULT_DEF (res)
5298 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5300 gimple_seq_discard (stmts);
5302 /* During propagation we have to treat SSA info conservatively
5303 and thus we can end up simplifying the inserted expression
5304 at elimination time to something not defined in stmts. */
5305 /* But then this is a redundancy we failed to detect. Which means
5306 res now has two values. That doesn't play well with how
5307 we track availability here, so give up. */
5308 if (dump_file && (dump_flags & TDF_DETAILS))
5310 if (TREE_CODE (res) == SSA_NAME)
5311 res = eliminate_avail (res);
5312 if (res)
5314 fprintf (dump_file, "Failed to insert expression for value ");
5315 print_generic_expr (dump_file, val);
5316 fprintf (dump_file, " which is really fully redundant to ");
5317 print_generic_expr (dump_file, res);
5318 fprintf (dump_file, "\n");
5322 return NULL_TREE;
5324 else
5326 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5327 VN_INFO_GET (res)->valnum = val;
5330 insertions++;
5331 if (dump_file && (dump_flags & TDF_DETAILS))
5333 fprintf (dump_file, "Inserted ");
5334 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5337 return res;
5342 /* Perform elimination for the basic-block B during the domwalk. */
5344 edge
5345 eliminate_dom_walker::before_dom_children (basic_block b)
5347 /* Mark new bb. */
5348 avail_stack.safe_push (NULL_TREE);
5350 /* Skip blocks that were marked unreachable during the SCCVN domwalk. */
5351 edge_iterator ei;
5352 edge e;
5353 FOR_EACH_EDGE (e, ei, b->preds)
5354 if (e->flags & EDGE_EXECUTABLE)
5355 break;
5356 if (! e)
5357 return NULL;
5359 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5361 gphi *phi = gsi.phi ();
5362 tree res = PHI_RESULT (phi);
5364 if (virtual_operand_p (res))
5366 gsi_next (&gsi);
5367 continue;
5370 tree sprime = eliminate_avail (res);
5371 if (sprime
5372 && sprime != res)
5374 if (dump_file && (dump_flags & TDF_DETAILS))
5376 fprintf (dump_file, "Replaced redundant PHI node defining ");
5377 print_generic_expr (dump_file, res);
5378 fprintf (dump_file, " with ");
5379 print_generic_expr (dump_file, sprime);
5380 fprintf (dump_file, "\n");
5383 /* If we inserted this PHI node ourselves, it's not an elimination. */
5384 if (! inserted_exprs
5385 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5386 eliminations++;
5388 /* If we will propagate into all uses don't bother to do
5389 anything. */
5390 if (may_propagate_copy (res, sprime))
5392 /* Mark the PHI for removal. */
5393 to_remove.safe_push (phi);
5394 gsi_next (&gsi);
5395 continue;
5398 remove_phi_node (&gsi, false);
5400 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5401 sprime = fold_convert (TREE_TYPE (res), sprime);
5402 gimple *stmt = gimple_build_assign (res, sprime);
5403 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5404 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5405 continue;
5408 eliminate_push_avail (res);
5409 gsi_next (&gsi);
5412 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5413 !gsi_end_p (gsi);
5414 gsi_next (&gsi))
5416 tree sprime = NULL_TREE;
5417 gimple *stmt = gsi_stmt (gsi);
5418 tree lhs = gimple_get_lhs (stmt);
5419 if (lhs && TREE_CODE (lhs) == SSA_NAME
5420 && !gimple_has_volatile_ops (stmt)
5421 /* See PR43491. Do not replace a global register variable when
5422 it is the RHS of an assignment. Do replace local register
5423 variables since gcc does not guarantee a local variable will
5424 be allocated in a register.
5425 ??? The fix isn't effective here. This should instead
5426 be ensured by not value-numbering them the same but treating
5427 them like volatiles? */
5428 && !(gimple_assign_single_p (stmt)
5429 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5430 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5431 && is_global_var (gimple_assign_rhs1 (stmt)))))
5433 sprime = eliminate_avail (lhs);
5434 if (!sprime)
5436 /* If there is no existing usable leader but SCCVN thinks
5437 it has an expression it wants to use as replacement,
5438 insert that. */
5439 tree val = VN_INFO (lhs)->valnum;
5440 if (val != VN_TOP
5441 && TREE_CODE (val) == SSA_NAME
5442 && VN_INFO (val)->needs_insertion
5443 && VN_INFO (val)->expr != NULL
5444 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5445 eliminate_push_avail (sprime);
5448 /* If this now constitutes a copy, duplicate points-to
5449 and range info appropriately. This is especially
5450 important for inserted code. See tree-ssa-copy.c
5451 for similar code. */
5452 if (sprime
5453 && TREE_CODE (sprime) == SSA_NAME)
5455 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5456 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5457 && VN_INFO_PTR_INFO (lhs)
5458 && ! VN_INFO_PTR_INFO (sprime))
5460 duplicate_ssa_name_ptr_info (sprime,
5461 VN_INFO_PTR_INFO (lhs));
5462 if (b != sprime_b)
5463 mark_ptr_info_alignment_unknown
5464 (SSA_NAME_PTR_INFO (sprime));
5466 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5467 && VN_INFO_RANGE_INFO (lhs)
5468 && ! VN_INFO_RANGE_INFO (sprime)
5469 && b == sprime_b)
5470 duplicate_ssa_name_range_info (sprime,
5471 VN_INFO_RANGE_TYPE (lhs),
5472 VN_INFO_RANGE_INFO (lhs));
5475 /* Inhibit the use of an inserted PHI on a loop header when
5476 the address of the memory reference is a simple induction
5477 variable. In other cases the vectorizer won't do anything
5478 anyway (either it's loop invariant or a complicated
5479 expression). */
5480 if (sprime
5481 && TREE_CODE (sprime) == SSA_NAME
5482 && do_pre
5483 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5484 && loop_outer (b->loop_father)
5485 && has_zero_uses (sprime)
5486 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5487 && gimple_assign_load_p (stmt))
5489 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5490 basic_block def_bb = gimple_bb (def_stmt);
5491 if (gimple_code (def_stmt) == GIMPLE_PHI
5492 && def_bb->loop_father->header == def_bb)
5494 loop_p loop = def_bb->loop_father;
5495 ssa_op_iter iter;
5496 tree op;
5497 bool found = false;
5498 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5500 affine_iv iv;
5501 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5502 if (def_bb
5503 && flow_bb_inside_loop_p (loop, def_bb)
5504 && simple_iv (loop, loop, op, &iv, true))
5506 found = true;
5507 break;
5510 if (found)
5512 if (dump_file && (dump_flags & TDF_DETAILS))
5514 fprintf (dump_file, "Not replacing ");
5515 print_gimple_expr (dump_file, stmt, 0);
5516 fprintf (dump_file, " with ");
5517 print_generic_expr (dump_file, sprime);
5518 fprintf (dump_file, " which would add a loop"
5519 " carried dependence to loop %d\n",
5520 loop->num);
5522 /* Don't keep sprime available. */
5523 sprime = NULL_TREE;
5528 if (sprime)
5530 /* If we can propagate the value computed for LHS into
5531 all uses don't bother doing anything with this stmt. */
5532 if (may_propagate_copy (lhs, sprime))
5534 /* Mark it for removal. */
5535 to_remove.safe_push (stmt);
5537 /* ??? Don't count copy/constant propagations. */
5538 if (gimple_assign_single_p (stmt)
5539 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5540 || gimple_assign_rhs1 (stmt) == sprime))
5541 continue;
5543 if (dump_file && (dump_flags & TDF_DETAILS))
5545 fprintf (dump_file, "Replaced ");
5546 print_gimple_expr (dump_file, stmt, 0);
5547 fprintf (dump_file, " with ");
5548 print_generic_expr (dump_file, sprime);
5549 fprintf (dump_file, " in all uses of ");
5550 print_gimple_stmt (dump_file, stmt, 0);
5553 eliminations++;
5554 continue;
5557 /* If this is an assignment from our leader (which
5558 happens in the case the value-number is a constant)
5559 then there is nothing to do. */
5560 if (gimple_assign_single_p (stmt)
5561 && sprime == gimple_assign_rhs1 (stmt))
5562 continue;
5564 /* Else replace its RHS. */
5565 bool can_make_abnormal_goto
5566 = is_gimple_call (stmt)
5567 && stmt_can_make_abnormal_goto (stmt);
5569 if (dump_file && (dump_flags & TDF_DETAILS))
5571 fprintf (dump_file, "Replaced ");
5572 print_gimple_expr (dump_file, stmt, 0);
5573 fprintf (dump_file, " with ");
5574 print_generic_expr (dump_file, sprime);
5575 fprintf (dump_file, " in ");
5576 print_gimple_stmt (dump_file, stmt, 0);
5579 eliminations++;
5580 gimple *orig_stmt = stmt;
5581 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5582 TREE_TYPE (sprime)))
5583 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5584 tree vdef = gimple_vdef (stmt);
5585 tree vuse = gimple_vuse (stmt);
5586 propagate_tree_value_into_stmt (&gsi, sprime);
5587 stmt = gsi_stmt (gsi);
5588 update_stmt (stmt);
5589 if (vdef != gimple_vdef (stmt))
5590 VN_INFO (vdef)->valnum = vuse;
5592 /* If we removed EH side-effects from the statement, clean
5593 its EH information. */
5594 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5596 bitmap_set_bit (need_eh_cleanup,
5597 gimple_bb (stmt)->index);
5598 if (dump_file && (dump_flags & TDF_DETAILS))
5599 fprintf (dump_file, " Removed EH side-effects.\n");
5602 /* Likewise for AB side-effects. */
5603 if (can_make_abnormal_goto
5604 && !stmt_can_make_abnormal_goto (stmt))
5606 bitmap_set_bit (need_ab_cleanup,
5607 gimple_bb (stmt)->index);
5608 if (dump_file && (dump_flags & TDF_DETAILS))
5609 fprintf (dump_file, " Removed AB side-effects.\n");
5612 continue;
5616 /* If the statement is a scalar store, see if the expression
5617 has the same value number as its rhs. If so, the store is
5618 dead. */
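/* E.g. a store "*p = x_1;" where the lookup below shows that *p
   already contains x_1's value can be queued for removal, provided
   the earlier store's alias set covers this one's.  */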
5619 if (gimple_assign_single_p (stmt)
5620 && !gimple_has_volatile_ops (stmt)
5621 && !is_gimple_reg (gimple_assign_lhs (stmt))
5622 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5623 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5625 tree val;
5626 tree rhs = gimple_assign_rhs1 (stmt);
5627 vn_reference_t vnresult;
5628 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5629 &vnresult, false);
5630 if (TREE_CODE (rhs) == SSA_NAME)
5631 rhs = VN_INFO (rhs)->valnum;
5632 if (val
5633 && operand_equal_p (val, rhs, 0))
5635 /* We can only remove the later store if the former aliases
5636 at least all accesses the later one does or if the store
5637 was to readonly memory storing the same value. */
5638 alias_set_type set = get_alias_set (lhs);
5639 if (! vnresult
5640 || vnresult->set == set
5641 || alias_set_subset_of (set, vnresult->set))
5643 if (dump_file && (dump_flags & TDF_DETAILS))
5645 fprintf (dump_file, "Deleted redundant store ");
5646 print_gimple_stmt (dump_file, stmt, 0);
5649 /* Queue stmt for removal. */
5650 to_remove.safe_push (stmt);
5651 continue;
5656 /* If value numbering left one of the outgoing edges of this
5657 control statement not executable, force the condition in a way
5658 consistent with that. */
5659 if (gcond *cond = dyn_cast <gcond *> (stmt))
5661 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5662 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5664 if (dump_file && (dump_flags & TDF_DETAILS))
5666 fprintf (dump_file, "Removing unexecutable edge from ");
5667 print_gimple_stmt (dump_file, stmt, 0);
5669 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5670 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5671 gimple_cond_make_true (cond);
5672 else
5673 gimple_cond_make_false (cond);
5674 update_stmt (cond);
5675 el_todo |= TODO_cleanup_cfg;
5676 continue;
5680 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5681 bool was_noreturn = (is_gimple_call (stmt)
5682 && gimple_call_noreturn_p (stmt));
5683 tree vdef = gimple_vdef (stmt);
5684 tree vuse = gimple_vuse (stmt);
5686 /* If we didn't replace the whole stmt (or propagate the result
5687 into all uses), replace all uses on this stmt with their
5688 leaders. */
5689 bool modified = false;
5690 use_operand_p use_p;
5691 ssa_op_iter iter;
5692 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5694 tree use = USE_FROM_PTR (use_p);
5695 /* ??? The call code above leaves stmt operands un-updated. */
5696 if (TREE_CODE (use) != SSA_NAME)
5697 continue;
5698 tree sprime = eliminate_avail (use);
5699 if (sprime && sprime != use
5700 && may_propagate_copy (use, sprime)
5701 /* We substitute into debug stmts to avoid excessive
5702 debug temporaries created by removed stmts, but we need
5703 to avoid doing so for inserted sprimes as we never want
5704 to create debug temporaries for them. */
5705 && (!inserted_exprs
5706 || TREE_CODE (sprime) != SSA_NAME
5707 || !is_gimple_debug (stmt)
5708 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5710 propagate_value (use_p, sprime);
5711 modified = true;
5715 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5716 into, which is a requirement for the IPA devirt machinery. */
5717 gimple *old_stmt = stmt;
5718 if (modified)
5720 /* If a formerly non-invariant ADDR_EXPR is turned into an
5721 invariant one it was on a separate stmt. */
5722 if (gimple_assign_single_p (stmt)
5723 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5724 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5725 gimple_stmt_iterator prev = gsi;
5726 gsi_prev (&prev);
5727 if (fold_stmt (&gsi))
5729 /* fold_stmt may have created new stmts in between
5730 the previous stmt and the folded stmt. Mark
5731 all defs created there as varying to not confuse
5732 the SCCVN machinery as we're using that even during
5733 elimination. */
5734 if (gsi_end_p (prev))
5735 prev = gsi_start_bb (b);
5736 else
5737 gsi_next (&prev);
5738 if (gsi_stmt (prev) != gsi_stmt (gsi))
5741 tree def;
5742 ssa_op_iter dit;
5743 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5744 dit, SSA_OP_ALL_DEFS)
5745 /* As existing DEFs may move between stmts
5746 we have to guard VN_INFO_GET. */
5747 if (! has_VN_INFO (def))
5748 VN_INFO_GET (def)->valnum = def;
5749 if (gsi_stmt (prev) == gsi_stmt (gsi))
5750 break;
5751 gsi_next (&prev);
5753 while (1);
5755 stmt = gsi_stmt (gsi);
5756 /* In case we folded the stmt away schedule the NOP for removal. */
5757 if (gimple_nop_p (stmt))
5758 to_remove.safe_push (stmt);
5761 /* Visit indirect calls and turn them into direct calls if
5762 possible using the devirtualization machinery. Do this before
5763 checking for required EH/abnormal/noreturn cleanup as devirt
5764 may expose more of those. */
5765 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5767 tree fn = gimple_call_fn (call_stmt);
5768 if (fn
5769 && flag_devirtualize
5770 && virtual_method_call_p (fn))
5772 tree otr_type = obj_type_ref_class (fn);
5773 unsigned HOST_WIDE_INT otr_tok
5774 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5775 tree instance;
5776 ipa_polymorphic_call_context context (current_function_decl,
5777 fn, stmt, &instance);
5778 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5779 otr_type, stmt);
5780 bool final;
5781 vec <cgraph_node *> targets
5782 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5783 otr_tok, context, &final);
5784 if (dump_file)
5785 dump_possible_polymorphic_call_targets (dump_file,
5786 obj_type_ref_class (fn),
5787 otr_tok, context);
5788 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5789 {
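/* With a provably complete target list we either devirtualize to the
   single remaining target or, if no target remains, turn the call into
   __builtin_unreachable.  */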
5790 tree fn;
5791 if (targets.length () == 1)
5792 fn = targets[0]->decl;
5793 else
5794 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5795 if (dump_enabled_p ())
5796 {
5797 location_t loc = gimple_location (stmt);
5798 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5799 "converting indirect call to "
5800 "function %s\n",
5801 lang_hooks.decl_printable_name (fn, 2));
5802 }
5803 gimple_call_set_fndecl (call_stmt, fn);
5804 /* If changing the call to __builtin_unreachable
5805 or similar noreturn function, adjust gimple_call_fntype
5806 too. */
5807 if (gimple_call_noreturn_p (call_stmt)
5808 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5809 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5810 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5811 == void_type_node))
5812 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5813 maybe_remove_unused_call_args (cfun, call_stmt);
5814 modified = true;
5815 }
5816 }
5817 }
5819 if (modified)
5820 {
5821 /* When changing a call into a noreturn call, cfg cleanup
5822 is needed to fix up the noreturn call. */
5823 if (!was_noreturn
5824 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5825 to_fixup.safe_push (stmt);
5826 /* When changing a condition or switch into one where we know which
5827 edge will be executed, schedule a cfg cleanup. */
5828 if ((gimple_code (stmt) == GIMPLE_COND
5829 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5830 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5831 || (gimple_code (stmt) == GIMPLE_SWITCH
5832 && TREE_CODE (gimple_switch_index
5833 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5834 el_todo |= TODO_cleanup_cfg;
5835 /* If we removed EH side-effects from the statement, clean
5836 its EH information. */
5837 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5838 {
5839 bitmap_set_bit (need_eh_cleanup,
5840 gimple_bb (stmt)->index);
5841 if (dump_file && (dump_flags & TDF_DETAILS))
5842 fprintf (dump_file, " Removed EH side-effects.\n");
5843 }
5844 /* Likewise for AB side-effects. */
5845 if (can_make_abnormal_goto
5846 && !stmt_can_make_abnormal_goto (stmt))
5847 {
5848 bitmap_set_bit (need_ab_cleanup,
5849 gimple_bb (stmt)->index);
5850 if (dump_file && (dump_flags & TDF_DETAILS))
5851 fprintf (dump_file, " Removed AB side-effects.\n");
5852 }
5853 update_stmt (stmt);
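/* If folding changed or removed the stmt's virtual definition,
   value-number the original VDEF to the incoming VUSE so later
   lookups still resolve.  */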
5854 if (vdef != gimple_vdef (stmt))
5855 VN_INFO (vdef)->valnum = vuse;
5856 }
5858 /* Make new values available; for a fully redundant LHS we already
5859 continued with the next stmt above and never reach this point. */
5860 def_operand_p defp;
5861 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5862 eliminate_push_avail (DEF_FROM_PTR (defp));
5863 }
5865 /* Replace destination PHI arguments. */
5866 FOR_EACH_EDGE (e, ei, b->succs)
5867 if (e->flags & EDGE_EXECUTABLE)
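/* Only edges still marked executable by value numbering matter here.  */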
5868 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5869 !gsi_end_p (gsi);
5870 gsi_next (&gsi))
5871 {
5872 gphi *phi = gsi.phi ();
5873 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5874 tree arg = USE_FROM_PTR (use_p);
5875 if (TREE_CODE (arg) != SSA_NAME
5876 || virtual_operand_p (arg))
5877 continue;
5878 tree sprime = eliminate_avail (arg);
5879 if (sprime && may_propagate_copy (arg, sprime))
5880 propagate_value (use_p, sprime);
5881 }
5882 return NULL;
5883 }
5885 /* Pop leaders that go out of scope, restoring any leaders they shadowed. */
5887 void
5888 eliminate_dom_walker::after_dom_children (basic_block)
5889 {
5890 tree entry;
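/* Each popped entry is either a leader made available in the block we
   are leaving or the leader it shadowed; drop the former and reinstate
   the latter.  */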
5891 while ((entry = avail_stack.pop ()) != NULL_TREE)
5892 {
5893 tree valnum = VN_INFO (entry)->valnum;
5894 tree old = avail[SSA_NAME_VERSION (valnum)];
5895 if (old == entry)
5896 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5897 else
5898 avail[SSA_NAME_VERSION (valnum)] = entry;
5899 }
5900 }
5902 /* Eliminate fully redundant computations. */
5904 unsigned int
5905 vn_eliminate (bitmap inserted_exprs)
5906 {
5907 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5908 el.avail.reserve (num_ssa_names);
5910 el.walk (cfun->cfg->x_entry_block_ptr);
5912 /* We cannot remove stmts during the BB walk, and especially cannot
5913 release SSA names there, as this confuses the VN machinery. The stmts
5914 ending up in to_remove are either stores or simple copies.
5915 Remove stmts in reverse order to make debug stmt creation possible. */
5916 while (!el.to_remove.is_empty ())
5917 {
5918 gimple *stmt = el.to_remove.pop ();
5920 if (dump_file && (dump_flags & TDF_DETAILS))
5921 {
5922 fprintf (dump_file, "Removing dead stmt ");
5923 print_gimple_stmt (dump_file, stmt, 0, 0);
5924 }
5926 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5927 if (gimple_code (stmt) == GIMPLE_PHI)
5928 remove_phi_node (&gsi, true);
5929 else
5930 {
5931 basic_block bb = gimple_bb (stmt);
5932 unlink_stmt_vdef (stmt);
5933 if (gsi_remove (&gsi, true))
5934 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5935 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5936 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5937 release_defs (stmt);
5938 }
5940 /* Removing a stmt may expose a forwarder block. */
5941 el.el_todo |= TODO_cleanup_cfg;
5942 }
5944 /* Fix up stmts that became noreturn calls. This may require splitting
5945 blocks and thus isn't possible during the dominator walk. Do this
5946 in reverse order so we don't inadvertently remove a stmt we want to
5947 fix up by visiting a dominating now-noreturn call first. */
5948 while (!el.to_fixup.is_empty ())
5949 {
5950 gimple *stmt = el.to_fixup.pop ();
5952 if (dump_file && (dump_flags & TDF_DETAILS))
5953 {
5954 fprintf (dump_file, "Fixing up noreturn call ");
5955 print_gimple_stmt (dump_file, stmt, 0);
5956 }
5958 if (fixup_noreturn_call (stmt))
5959 el.el_todo |= TODO_cleanup_cfg;
5960 }
5962 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5963 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5965 if (do_eh_cleanup)
5966 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5968 if (do_ab_cleanup)
5969 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5971 if (do_eh_cleanup || do_ab_cleanup)
5972 el.el_todo |= TODO_cleanup_cfg;
5974 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5975 statistics_counter_event (cfun, "Insertions", el.insertions);
5977 return el.el_todo;
5978 }
5981 namespace {
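/* FRE: value-number the whole function with SCCVN and eliminate fully
   redundant computations, without the partial-redundancy insertion that
   PRE performs.  */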
5983 const pass_data pass_data_fre =
5984 {
5985 GIMPLE_PASS, /* type */
5986 "fre", /* name */
5987 OPTGROUP_NONE, /* optinfo_flags */
5988 TV_TREE_FRE, /* tv_id */
5989 ( PROP_cfg | PROP_ssa ), /* properties_required */
5990 0, /* properties_provided */
5991 0, /* properties_destroyed */
5992 0, /* todo_flags_start */
5993 0, /* todo_flags_finish */
5994 };
5996 class pass_fre : public gimple_opt_pass
5997 {
5998 public:
5999 pass_fre (gcc::context *ctxt)
6000 : gimple_opt_pass (pass_data_fre, ctxt)
6001 {}
6003 /* opt_pass methods: */
6004 opt_pass * clone () { return new pass_fre (m_ctxt); }
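/* Gated by -ftree-fre (enabled by default when optimizing).  */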
6005 virtual bool gate (function *) { return flag_tree_fre != 0; }
6006 virtual unsigned int execute (function *);
6008 }; // class pass_fre
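/* Value-number the whole function in rewrite mode, eliminate
   redundancies, then restore SSA info and release the VN tables.  */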
6010 unsigned int
6011 pass_fre::execute (function *)
6012 {
6013 unsigned int todo = 0;
6015 run_scc_vn (VN_WALKREWRITE);
6017 /* Remove all the redundant expressions. */
6018 todo |= vn_eliminate (NULL);
6020 scc_vn_restore_ssa_info ();
6021 free_scc_vn ();
6023 return todo;
6024 }
6026 } // anon namespace
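/* Factory used by the pass manager (see passes.def) to instantiate the
   FRE pass.  */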
6028 gimple_opt_pass *
6029 make_pass_fre (gcc::context *ctxt)
6030 {
6031 return new pass_fre (ctxt);
6032 }